1 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
5 ! Program to carry out conformational search of proteins in a united-residue  !
8 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
13 use control, only:tcpu
14 use io_base, only:ilen
15 use geometry, only:chainbuild
16 use control, only:dajczas
19 use compare, only: test
25 ! implicit real*8 (a-h,o-z)
26 ! include 'DIMENSIONS'
28 use MPI_data ! include 'COMMON.SETUP'
33 use MPI_data, only: me,king
36 ! include 'COMMON.TIME1'
37 ! include 'COMMON.INTERACT'
38 ! include 'COMMON.NAMES'
39 ! include 'COMMON.GEO'
40 ! include 'COMMON.HEADER'
41 ! include 'COMMON.CONTROL'
42 ! include 'COMMON.CONTACTS'
43 ! include 'COMMON.CHAIN'
44 ! include 'COMMON.VAR'
45 ! include 'COMMON.IOUNITS'
46 ! include 'COMMON.FFIELD'
47 ! include 'COMMON.REMD'
49 ! include 'COMMON.SBRIDGE'
51 real(kind=8) :: hrtime,mintime,sectime
52 character(len=64) :: text_mode_calc(-2:14)
53 text_mode_calc(-2) = 'test'
54 text_mode_calc(-1) = 'cos'
55 text_mode_calc(0) = 'Energy evaluation or minimization'
56 text_mode_calc(1) = 'Regularization of PDB structure'
57 text_mode_calc(2) = 'Threading of a sequence on PDB structures'
58 text_mode_calc(3) = 'Monte Carlo (with minimization) '
59 text_mode_calc(4) = 'Energy minimization of multiple conformations'
60 text_mode_calc(5) = 'Checking energy gradient'
61 text_mode_calc(6) = 'Entropic sampling Monte Carlo (with minimization)'
62 text_mode_calc(7) = 'Energy map'
63 text_mode_calc(8) = 'CSA calculations'
64 text_mode_calc(9) = 'Not used 9'
65 text_mode_calc(10) = 'Not used 10'
66 text_mode_calc(11) = 'Soft regularization of PDB structure'
67 text_mode_calc(12) = 'Mesoscopic molecular dynamics (MD) '
68 text_mode_calc(13) = 'Not used 13'
69 text_mode_calc(14) = 'Replica exchange molecular dynamics (REMD)'
71 ! call memmon_print_usage()
75 write(iout,*)'### LAST MODIFIED 09/03/15 15:32PM by EL'
76 if (me.eq.king) call cinfo
77 ! Read force field parameters and job setup data
80 write (iout,*) "After readrtns"
83 if (me.eq.king .or. .not. out1file) then
84 write (iout,'(2a/)') &
85 text_mode_calc(modecalc)(:ilen(text_mode_calc(modecalc))), &
87 if (minim) write (iout,'(a)') &
88 'Conformations will be energy-minimized.'
89 write (iout,'(80(1h*)/)')
93 if (modecalc.eq.-2) then
96 else if (modecalc.eq.-1) then
97 write(iout,*) "call check_sc_map next"
101 !elwrite(iout,*)"!!!!!!!!!!!!!!!!! in unres"
104 if (fg_rank.gt.0) then
105 ! Fine-grain slaves just do energy and gradient components.
106 call ergastulum ! slave workhouse in Latin
109 if (modecalc.eq.0) then
110 call exec_eeval_or_minim
111 else if (modecalc.eq.1) then
113 else if (modecalc.eq.2) then
115 else if (modecalc.eq.3 .or. modecalc .eq.6) then
117 else if (modecalc.eq.4) then
118 call exec_mult_eeval_or_minim
119 else if (modecalc.eq.5) then
121 else if (ModeCalc.eq.7) then
123 else if (ModeCalc.eq.8) then
125 else if (modecalc.eq.11) then
127 else if (modecalc.eq.12) then
129 else if (modecalc.eq.14) then
132 write (iout,'(a)') 'This calculation type is not supported',&
139 if (fg_rank.eq.0) call finish_task
140 ! call memmon_print_usage()
142 call print_detailed_timing
144 call MPI_Finalize(ierr)
147 call dajczas(tcpu(),hrtime,mintime,sectime)
148 stop '********** Program terminated normally.'
152 !-----------------------------------------------------------------------------
154 !-----------------------------------------------------------------------------
156 use MPI_data !include 'COMMON.SETUP'
157 use control_data !include 'COMMON.CONTROL'
158 use geometry, only:chainbuild
160 use io_units !include 'COMMON.IOUNITS'
163 ! include 'DIMENSIONS'
169 print *,'After MD alloc'
170 if (me.eq.king .or. .not. out1file) &
171 write (iout,*) "Calling chainbuild"
175 end subroutine exec_MD
176 !---------------------------------------------------------------------------
177 subroutine exec_MREMD
178 use MPI_data !include 'COMMON.SETUP'
179 use control_data !include 'COMMON.CONTROL'
180 use io_units !include 'COMMON.IOUNITS'
182 use REMD_data !include 'COMMON.REMD'
183 use geometry, only:chainbuild
187 ! include 'DIMENSIONS'
194 call alloc_MREMD_arrays
196 ! if (me.eq.king .or. .not. out1file) &
197 ! write (iout,*) "Calling chainbuild"
199 if (me.eq.king .or. .not. out1file) &
200 write (iout,*) "Calling REMD",remd_mlist,nrep
210 end subroutine exec_MREMD
211 !-----------------------------------------------------------------------------
212 subroutine exec_eeval_or_minim
213 use MPI_data !include 'COMMON.SETUP'
214 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
215 use io_units !include 'COMMON.IOUNITS'
217 ! use energy !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
218 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
219 ! use REMD !include 'COMMON.REMD'
220 ! use MD !include 'COMMON.MD'
223 use MD_data, only: iset
225 use geometry, only:chainbuild
227 use compare, only:alloc_compare_arrays,hairpin,secondary2,rms_nac_nnc
228 use minimm, only:minimize,minim_dc,sc_move
232 ! implicit real*8 (a-h,o-z)
233 ! include 'DIMENSIONS'
238 !el common /srutu/ icall
239 real(kind=8) :: energy_(0:n_ene)
240 real(kind=8) :: energy_long(0:n_ene),energy_short(0:n_ene)
241 real(kind=8) :: varia(6*nres) !(maxvar) (maxvar=6*maxres)
242 real(kind=8) :: time00, evals, etota, etot, time_ene, time1
243 integer :: nharp,nft_sc,iretcode,nfun
244 integer,dimension(4,nres) :: iharp !(4,nres/3)(4,maxres/3)
246 real(kind=8) :: rms,frac,frac_nn,co
249 if (iset.eq.0) iset=1
250 call alloc_compare_arrays
251 if ((indpdb.eq.0).and.(.not.read_cart)) then
253 write(iout,*) 'Warning: Calling chainbuild'
259 ! write(iout,*)"in exec_eeval or minimim",split_ene
261 ! write(iout,*)"cccccc",j,(c(i,j),i=1,3)
262 ! write(iout,*)"dcccccc",j,(dc(i,j),i=1,3)
265 ! write(iout,*)"in exec_eeval or minimim"
267 print *,"Processor",myrank," after chainbuild"
270 call etotal_long(energy_long)
271 write (iout,*) "Printing long range energy"
272 call enerprint(energy_long)
274 call etotal_short(energy_short)
275 write (iout,*) "Printing short range energy"
276 call enerprint(energy_short)
278 energy_(i)=energy_long(i)+energy_short(i)
279 write (iout,*) i,energy_long(i),energy_short(i),energy_(i)
281 write (iout,*) "Printing long+short range energy"
282 call enerprint(energy_)
287 time_ene=MPI_Wtime()-time00
289 write (iout,*) "Time for energy evaluation",time_ene
290 print *,"after etotal"
293 call enerprint(energy_)
294 call hairpin(.true.,nharp,iharp)
295 call secondary2(.true.)
299 print *, 'Calling OVERLAP_SC'
300 call overlap_sc(fail)
304 call sc_move(2,nres-1,10,1d10,nft_sc,etot)
305 print *,'SC_move',nft_sc,etot
306 write(iout,*) 'SC_move',nft_sc,etot
310 print *, 'Calling MINIM_DC'
314 ! call check_ecartint !el
315 call minim_dc(etot,iretcode,nfun)
316 ! call check_ecartint !el
318 if (indpdb.ne.0) then
320 write(iout,*) 'Warning: Calling chainbuild'
323 call geom_to_var(nvar,varia)
324 print *,'Calling MINIMIZE.'
329 ! call exec_checkgrad !el
330 call minimize(etot,varia,iretcode,nfun)
332 ! call exec_checkgrad !el
334 print *,'SUMSL return code is',iretcode,' eval ',nfun
336 evals=nfun/(MPI_WTIME()-time1)
338 print *,'# eval/s',evals
339 print *,'refstr=',refstr
340 ! call hairpin(.true.,nharp,iharp)
341 ! call secondary2(.true.)
344 call enerprint(energy_)
347 call briefout(0,etot)
348 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
349 write (iout,'(a,i3)') 'SUMSL return code:',iretcode
350 write (iout,'(a,i20)') '# of energy evaluations:',nfun+1
351 write (iout,'(a,f16.3)')'# of energy evaluations/sec:',evals
353 print *,'refstr=',refstr,frac,frac_nn,co
354 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
355 print *,"after rms_nac_ncc"
356 call briefout(0,etot)
358 if (outpdb) call pdbout(etot,titel(:32),ipdb)
359 if (outmol2) call mol2out(etot,titel(:32))
360 write(iout,*) "after exec_eeval_or_minim"
362 end subroutine exec_eeval_or_minim
363 !-----------------------------------------------------------------------------
364 subroutine exec_regularize
365 ! use MPI_data !include 'COMMON.SETUP'
366 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
367 use io_units !include 'COMMON.IOUNITS'
369 use energy_data !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
370 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
371 ! use REMD !include 'COMMON.REMD'
372 ! use MD !include 'COMMON.MD'
376 ! implicit real*8 (a-h,o-z)
377 ! include 'DIMENSIONS'
381 real(kind=8) :: energy_(0:n_ene)
383 real(kind=8) :: rms,frac,frac_nn,co
386 call alloc_compare_arrays
390 call regularize(nct-nnt+1,etot,rms,cref(1,nnt,1),iretcode)
392 energy_(0)=energy_(0)-energy_(14)
394 call enerprint(energy_)
396 call briefout(0,etot)
397 if (outpdb) call pdbout(etot,titel(:32),ipdb)
398 if (outmol2) call mol2out(etot,titel(:32))
399 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
400 write (iout,'(a,i3)') 'SUMSL return code:',iretcode
402 end subroutine exec_regularize
403 !-----------------------------------------------------------------------------
404 subroutine exec_thread
405 ! use MPI_data !include 'COMMON.SETUP'
408 ! include 'DIMENSIONS'
412 call alloc_compare_arrays
415 end subroutine exec_thread
416 !-----------------------------------------------------------------------------
418 ! use MPI_data !include 'COMMON.SETUP'
419 use control_data !include 'COMMON.CONTROL'
424 ! implicit real*8 (a-h,o-z)
425 ! include 'DIMENSIONS'
426 character(len=10) :: nodeinfo
427 real(kind=8) :: varia(6*nres) !(maxvar) (maxvar=6*maxres)
432 call alloc_MCM_arrays
436 if (modecalc.eq.3) then
442 if (modecalc.eq.3) then
452 end subroutine exec_MC
453 !-----------------------------------------------------------------------------
454 subroutine exec_mult_eeval_or_minim
455 use MPI_data !include 'COMMON.SETUP'
456 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
457 use io_units !include 'COMMON.IOUNITS'
459 use energy_data !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
460 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
461 ! use REMD !include 'COMMON.REMD'
462 ! use MD !include 'COMMON.MD'
464 use geometry, only:chainbuild,geom_to_var,int_from_cart1,var_to_geom
465 use energy, only:etotal,enerprint
466 use compare, only:rms_nac_nnc
467 use minimm, only:minimize!,minim_mcmf
468 ! implicit real*8 (a-h,o-z)
469 ! include 'DIMENSIONS'
471 use minimm, only:minim_mcmf
474 integer :: ierror,ierr
476 real(kind=8),dimension(mpi_status_size) :: muster
480 real(kind=8) :: varia(6*nres) !(maxvar) (maxvar=6*maxres)
481 integer,dimension(6) :: ind
482 real(kind=8) :: energy_(0:n_ene)
484 real(kind=8) :: etot,ene0
485 integer :: mm,imm,nft,n,iconf,nmin,i,iretcode,nfun,it,&
487 real(kind=8) :: rms,frac,frac_nn,co,time,ene
497 open(intin,file=intinname,status='old')
498 write (istat,'(a5,20a12)')"# ",&
499 (wname(print_order(i)),i=1,nprint_ene)
501 write (istat,'(a5,20a12)')"# ",&
502 (ename(print_order(i)),i=1,nprint_ene),&
503 "ETOT total","RMSD","nat.contact","nnt.contact"
505 write (istat,'(a5,20a12)')"# ",&
506 (ename(print_order(i)),i=1,nprint_ene),"ETOT total"
512 read (intin,'(e15.10,e15.5)',end=1100,err=1100) time,ene
513 call read_x(intin,*11)
515 ! Broadcast the order to compute internal coordinates to the slaves.
517 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
519 call int_from_cart1(.false.)
521 read (intin,'(i5)',end=1100,err=1100) iconf
522 call read_angles(intin,*11)
523 call geom_to_var(nvar,varia)
524 write(iout,*) 'Warning: Calling chainbuild1'
527 write (iout,'(a,i7)') 'Conformation #',iconf
529 call briefout(iconf,energy_(0))
530 call enerprint(energy_)
533 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
534 write (istat,'(i5,20(f12.3))') iconf,&
535 (energy_(print_order(i)),i=1,nprint_ene),etot,&
539 write (istat,'(i5,16(f12.3))') iconf,&
540 (energy_(print_order(i)),i=1,nprint_ene),etot
556 if (mm.lt.nodes) then
558 read (intin,'(e15.10,e15.5)',end=11,err=11) time,ene
559 call read_x(intin,*11)
561 ! Broadcast the order to compute internal coordinates to the slaves.
563 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
565 call int_from_cart1(.false.)
567 read (intin,'(i5)',end=11,err=11) iconf
568 call read_angles(intin,*11)
569 call geom_to_var(nvar,varia)
570 write(iout,*) 'Warning: Calling chainbuild2'
573 write (iout,'(a,i7)') 'Conformation #',iconf
583 call mpi_send(ind,6,mpi_integer,mm,idint,CG_COMM,&
585 call mpi_send(varia,nvar,mpi_double_precision,mm,&
587 call mpi_send(ene0,1,mpi_double_precision,mm,&
589 ! print *,'task ',n,' sent to worker ',mm,nvar
591 call mpi_recv(ind,6,mpi_integer,mpi_any_source,idint,&
593 man=muster(mpi_source)
594 ! print *,'receiving result from worker ',man,' (',iii1,iii,')'
595 call mpi_recv(varia,nvar,mpi_double_precision,&
596 man,idreal,CG_COMM,muster,ierr)
597 call mpi_recv(ene,1,&
598 mpi_double_precision,man,idreal,&
600 call mpi_recv(ene0,1,&
601 mpi_double_precision,man,idreal,&
603 ! print *,'result received from worker ',man,' sending now'
605 call var_to_geom(nvar,varia)
606 write(iout,*) 'Warning: Calling chainbuild3'
612 write (iout,'(a,2i7)') 'Conformation #',iconf,ind(5)
615 call enerprint(energy_)
616 call briefout(it,etot)
617 ! if (minim) call briefout(it,etot)
619 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
620 write (istat,'(i5,19(f12.3))') iconf,&
621 (energy_(print_order(i)),i=1,nprint_ene),etot,&
624 write (istat,'(i5,15(f12.3))') iconf,&
625 (energy_(print_order(i)),i=1,nprint_ene),etot
630 read (intin,'(e15.10,e15.5)',end=1101,err=1101) time,ene
631 call read_x(intin,*11)
633 ! Broadcast the order to compute internal coordinates to the slaves.
635 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
637 call int_from_cart1(.false.)
639 read (intin,'(i5)',end=1101,err=1101) iconf
640 call read_angles(intin,*11)
641 call geom_to_var(nvar,varia)
642 write(iout,*) 'Warning: Calling chainbuild4'
653 call mpi_send(ind,6,mpi_integer,man,idint,CG_COMM,&
655 call mpi_send(varia,nvar,mpi_double_precision,man,&
657 call mpi_send(ene0,1,mpi_double_precision,man,&
659 nf_mcmf=nf_mcmf+ind(4)
665 call mpi_recv(ind,6,mpi_integer,mpi_any_source,idint,&
667 man=muster(mpi_source)
668 call mpi_recv(varia,nvar,mpi_double_precision,&
669 man,idreal,CG_COMM,muster,ierr)
670 call mpi_recv(ene,1,&
671 mpi_double_precision,man,idreal,&
673 call mpi_recv(ene0,1,&
674 mpi_double_precision,man,idreal,&
677 call var_to_geom(nvar,varia)
678 write(iout,*) 'Warning: Calling chainbuild5'
684 write (iout,'(a,2i7)') 'Conformation #',iconf,ind(5)
687 call enerprint(energy_)
688 call briefout(it,etot)
690 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
691 write (istat,'(i5,19(f12.3))') iconf,&
692 (energy_(print_order(i)),i=1,nprint_ene),etot,&
695 write (istat,'(i5,15(f12.3))') iconf,&
696 (energy_(print_order(i)),i=1,nprint_ene),etot
708 call mpi_send(ind,6,mpi_integer,i,idint,CG_COMM,&
713 open(intin,file=intinname,status='old')
714 write (istat,'(a5,20a12)')"# ",&
715 (wname(print_order(i)),i=1,nprint_ene)
716 write (istat,'("# ",20(1pe12.4))') &
717 (weights(print_order(i)),i=1,nprint_ene)
719 write (istat,'(a5,20a12)')"# ",&
720 (ename(print_order(i)),i=1,nprint_ene),&
721 "ETOT total","RMSD","nat.contact","nnt.contact"
723 write (istat,'(a5,14a12)')"# ",&
724 (ename(print_order(i)),i=1,nprint_ene),"ETOT total"
728 read (intin,'(e15.10,e15.5)',end=11,err=11) time,ene
729 call read_x(intin,*11)
731 ! Broadcast the order to compute internal coordinates to the slaves.
733 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
735 call int_from_cart1(.false.)
737 read (intin,'(i5)',end=11,err=11) iconf
738 call read_angles(intin,*11)
739 call geom_to_var(nvar,varia)
740 write(iout,*) 'Warning: Calling chainbuild5'
743 write (iout,'(a,i7)') 'Conformation #',iconf
744 if (minim) call minimize(etot,varia,iretcode,nfun)
748 call enerprint(energy_)
749 if (minim) call briefout(it,etot)
751 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
752 write (istat,'(i5,18(f12.3))') iconf,&
753 (energy_(print_order(i)),i=1,nprint_ene),&
754 etot,rms,frac,frac_nn,co
757 write (istat,'(i5,14(f12.3))') iconf,&
758 (energy_(print_order(i)),i=1,nprint_ene),etot
764 end subroutine exec_mult_eeval_or_minim
765 !-----------------------------------------------------------------------------
766 subroutine exec_checkgrad
767 ! use MPI_data !include 'COMMON.SETUP'
768 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
769 use io_units !include 'COMMON.IOUNITS'
770 !el use energy_data, only:icall !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
771 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
772 ! use REMD !include 'COMMON.REMD'
773 use MD_data !include 'COMMON.MD'
774 use io_base, only:intout
775 use io_config, only:read_fragments
780 ! implicit real*8 (a-h,o-z)
781 ! include 'DIMENSIONS'
786 !el common /srutu/ icall
787 real(kind=8) :: energy_(0:max_ene)
791 ! vbld(i)=vbld(i)+ran_number(-0.1d0,0.1d0)
792 ! if (itype(i).ne.10)
793 ! & vbld(i+nres)=vbld(i+nres)+ran_number(-0.001d0,0.001d0)
795 if (indpdb.eq.0) then
796 write(iout,*) 'Warning: Calling chainbuild'
801 ! dc(j,i)=dc(j,i)+ran_number(-0.2d0,0.2d0)
805 ! if (itype(i).ne.10) then
807 ! dc(j,i+nres)=dc(j,i+nres)+ran_number(-0.2d0,0.2d0)
812 ! dc(j,0)=ran_number(-0.2d0,0.2d0)
824 write (iout,*) "before etotal"
825 call etotal(energy_(0))
827 call enerprint(energy_(0))
828 write (iout,*) "Uconst",Uconst," Uconst_back",uconst_back
829 print *,'icheckgrad=',icheckgrad
830 goto (10,20,30) icheckgrad
831 10 call check_ecartint
834 20 call check_cartgrad
839 end subroutine exec_checkgrad
840 !-----------------------------------------------------------------------------
844 use io_config, only:map_read
847 call alloc_map_arrays
851 end subroutine exec_map
852 !-----------------------------------------------------------------------------
855 use io_units !include 'COMMON.IOUNITS'
861 ! include 'DIMENSIONS'
862 ! Conformational Space Annealling programmed by Jooyoung Lee.
863 ! This method works only with parallel machines!
865 call alloc_CSA_arrays
868 write (iout,*) "CSA works on parallel machines only"
871 end subroutine exec_CSA
872 !-----------------------------------------------------------------------------
873 subroutine exec_softreg
874 use io_units !include 'COMMON.IOUNITS'
875 use control_data !include 'COMMON.CONTROL'
877 use io_base, only:intout,briefout
878 use geometry, only:chainbuild
882 ! include 'DIMENSIONS'
883 real(kind=8) :: energy_(0:n_ene)
885 real(kind=8) :: rms,frac,frac_nn,co,etot
888 call alloc_compare_arrays
889 write(iout,*) 'Warning: Calling chainbuild'
892 call enerprint(energy_)
893 if (.not.lsecondary) then
894 write(iout,*) 'Calling secondary structure recognition'
895 call secondary2(debug)
897 write(iout,*) 'Using secondary structure supplied in pdb'
904 call enerprint(energy_)
906 call briefout(0,etot)
907 call secondary2(.true.)
908 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
910 end subroutine exec_softreg
911 !-----------------------------------------------------------------------------
913 !-----------------------------------------------------------------------------
915 subroutine ergastulum
917 ! implicit real*8 (a-h,o-z)
918 ! include 'DIMENSIONS'
921 use MDyn, only:setup_fricmat
922 use REMD, only:fricmat_mult,ginv_mult
926 ! include 'COMMON.SETUP'
927 ! include 'COMMON.DERIV'
928 ! include 'COMMON.VAR'
929 ! include 'COMMON.IOUNITS'
930 ! include 'COMMON.FFIELD'
931 ! include 'COMMON.INTERACT'
932 ! include 'COMMON.MD'
933 ! include 'COMMON.TIME1'
934 real(kind=8),dimension(6*nres) :: z,d_a_tmp !(maxres6) maxres6=6*maxres
935 real(kind=8) :: edum(0:n_ene),time_order(0:10)
936 !el real(kind=8),dimension(2*nres,2*nres) :: Gcopy !(maxres2,maxres2) maxres2=2*maxres
937 !el common /przechowalnia/ Gcopy
941 real(kind=8) :: time00
942 integer :: iorder,i,j,nres2,ierr,ierror
945 if(.not.allocated(Gcopy)) allocate(Gcopy(nres2,nres2))
947 if(.not.allocated(Ginv)) allocate(Ginv(nres2,nres2)) !(maxres2,maxres2)
950 if(.not.allocated(ng_start)) allocate(ng_start(0:nfgtasks-1))
951 if(.not.allocated(ng_counts)) allocate(ng_counts(0:nfgtasks-1))
952 if(.not.allocated(nginv_counts)) allocate(nginv_counts(0:nfgtasks-1)) !(0:MaxProcs-1)
953 if(.not.allocated(nginv_start)) allocate(nginv_start(0:nfgtasks)) !(0:MaxProcs)
955 if(.not.allocated(fricmat)) allocate(fricmat(nres2,nres2)) !maxres2=2*maxres
957 ! Workers wait for variables and NF, and NFL from the boss
959 do while (iorder.ge.0)
960 ! write (*,*) 'Processor',fg_rank,' CG group',kolor,
961 ! & ' receives order from Master'
963 call MPI_Bcast(iorder,1,MPI_INTEGER,king,FG_COMM,IERR)
964 time_Bcast=time_Bcast+MPI_Wtime()-time00
965 if (icall.gt.4 .and. iorder.ge.0) &
966 time_order(iorder)=time_order(iorder)+MPI_Wtime()-time00
969 ! & 'Processor',fg_rank,' completed receive MPI_BCAST order',iorder
970 if (iorder.eq.0) then
973 ! write (2,*) "After etotal"
974 ! write (2,*) "dimen",dimen," dimen3",dimen3
976 else if (iorder.eq.2) then
978 call etotal_short(edum)
979 ! write (2,*) "After etotal_short"
980 ! write (2,*) "dimen",dimen," dimen3",dimen3
982 else if (iorder.eq.3) then
984 call etotal_long(edum)
985 ! write (2,*) "After etotal_long"
986 ! write (2,*) "dimen",dimen," dimen3",dimen3
988 else if (iorder.eq.1) then
990 ! write (2,*) "After sum_gradient"
991 ! write (2,*) "dimen",dimen," dimen3",dimen3
993 else if (iorder.eq.4) then
994 call ginv_mult(z,d_a_tmp)
995 else if (iorder.eq.5) then
996 ! Setup MD things for a slave
997 dimen=(nct-nnt+1)+nside
998 dimen1=(nct-nnt)+(nct-nnt+1)
1000 ! write (2,*) "dimen",dimen," dimen3",dimen3
1002 call int_bounds(dimen,igmult_start,igmult_end)
1003 igmult_start=igmult_start-1
1004 call MPI_Allgather(3*igmult_start,1,MPI_INTEGER,&
1005 ng_start(0),1,MPI_INTEGER,FG_COMM,IERROR)
1006 my_ng_count=igmult_end-igmult_start
1007 call MPI_Allgather(3*my_ng_count,1,MPI_INTEGER,ng_counts(0),1,&
1008 MPI_INTEGER,FG_COMM,IERROR)
1009 write (2,*) "ng_start",(ng_start(i),i=0,nfgtasks-1) !sp
1010 ! write (2,*) "ng_counts",(ng_counts(i),i=0,nfgtasks-1)
1011 myginv_ng_count=nres2*my_ng_count !el maxres2
1012 ! write (2,*) "igmult_start",igmult_start," igmult_end",
1013 ! & igmult_end," my_ng_count",my_ng_count
1015 call MPI_Allgather(nres2*igmult_start,1,MPI_INTEGER,& !el maxres2
1016 nginv_start(0),1,MPI_INTEGER,FG_COMM,IERROR)
1017 call MPI_Allgather(myginv_ng_count,1,MPI_INTEGER,&
1018 nginv_counts(0),1,MPI_INTEGER,FG_COMM,IERROR)
1019 ! write (2,*) "nginv_start",(nginv_start(i),i=0,nfgtasks-1)
1020 ! write (2,*) "nginv_counts",(nginv_counts(i),i=0,nfgtasks-1)
1022 ! call MPI_Barrier(FG_COMM,IERROR)
1024 call MPI_Scatterv(ginv(1,1),nginv_counts(0),&
1025 nginv_start(0),MPI_DOUBLE_PRECISION,gcopy(1,1),&
1026 myginv_ng_count,MPI_DOUBLE_PRECISION,king,FG_COMM,IERR)
1028 time_scatter_ginv=time_scatter_ginv+MPI_Wtime()-time00
1031 do j=1,2*my_ng_count
1032 ginv(j,i)=gcopy(i,j)
1035 ! write (2,*) "dimen",dimen," dimen3",dimen3
1036 ! write (2,*) "End MD setup"
1038 ! write (iout,*) "My chunk of ginv_block"
1039 ! call MATOUT2(my_ng_count,dimen3,maxres2,maxers2,ginv_block)
1040 else if (iorder.eq.6) then
1041 call int_from_cart1(.false.)
1042 else if (iorder.eq.7) then
1043 call chainbuild_cart
1044 else if (iorder.eq.8) then
1046 else if (iorder.eq.9) then
1047 call fricmat_mult(z,d_a_tmp)
1048 else if (iorder.eq.10) then
1052 write (*,*) 'Processor',fg_rank,' CG group',kolor,&
1053 ' absolute rank',myrank,' leves ERGASTULUM.'
1054 write(*,*)'Processor',fg_rank,' wait times for respective orders',&
1055 (' order[',i,']',time_order(i),i=0,10)
1057 end subroutine ergastulum