1 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
5 ! Program to carry out conformational search of proteins in an united-residue !
8 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!
! Main driver.  After reading the force field and job setup data it dispatches
! to one of the exec_* subroutines selected by the integer flag "modecalc"
! (see the text_mode_calc table below for the meaning of each value).
! Fine-grain MPI slaves never reach the dispatch: they branch into the
! ergastulum work loop instead.
13 use control, only:tcpu
14 use io_base, only:ilen
15 use geometry, only:chainbuild
16 use control, only:dajczas
19 use compare, only: test
25 ! implicit real*8 (a-h,o-z)
26 ! include 'DIMENSIONS'
28 use MPI_data ! include 'COMMON.SETUP'
33 use MPI_data, only: me,king
36 ! include 'COMMON.TIME1'
37 ! include 'COMMON.INTERACT'
38 ! include 'COMMON.NAMES'
39 ! include 'COMMON.GEO'
40 ! include 'COMMON.HEADER'
41 ! include 'COMMON.CONTROL'
42 ! include 'COMMON.CONTACTS'
43 ! include 'COMMON.CHAIN'
44 ! include 'COMMON.VAR'
45 ! include 'COMMON.IOUNITS'
46 ! include 'COMMON.FFIELD'
47 ! include 'COMMON.REMD'
49 ! include 'COMMON.SBRIDGE'
51 real(kind=8) :: hrtime,mintime,sectime
! Human-readable label for each supported value of modecalc (-2..14),
! printed in the job banner below.
52 character(len=64) :: text_mode_calc(-2:14)
53 text_mode_calc(-2) = 'test'
54 text_mode_calc(-1) = 'cos'
55 text_mode_calc(0) = 'Energy evaluation or minimization'
56 text_mode_calc(1) = 'Regularization of PDB structure'
57 text_mode_calc(2) = 'Threading of a sequence on PDB structures'
58 text_mode_calc(3) = 'Monte Carlo (with minimization) '
59 text_mode_calc(4) = 'Energy minimization of multiple conformations'
60 text_mode_calc(5) = 'Checking energy gradient'
61 text_mode_calc(6) = 'Entropic sampling Monte Carlo (with minimization)'
62 text_mode_calc(7) = 'Energy map'
63 text_mode_calc(8) = 'CSA calculations'
64 text_mode_calc(9) = 'Not used 9'
65 text_mode_calc(10) = 'Not used 10'
66 text_mode_calc(11) = 'Soft regularization of PDB structure'
67 text_mode_calc(12) = 'Mesoscopic molecular dynamics (MD) '
68 text_mode_calc(13) = 'Not used 13'
69 text_mode_calc(14) = 'Replica exchange molecular dynamics (REMD)'
71 ! call memmon_print_usage()
75 write(iout,*)'### LAST MODIFIED 09/03/15 15:32PM by EL'
! Only the master ("king") prints the compile/configuration info.
76 if (me.eq.king) call cinfo
77 ! Read force field parameters and job setup data
! Print the job banner (calculation type) unless output is restricted to
! a single file and this process is not the master.
81 if (me.eq.king .or. .not. out1file) then
82 write (iout,'(2a/)') &
83 text_mode_calc(modecalc)(:ilen(text_mode_calc(modecalc))), &
85 if (minim) write (iout,'(a)') &
86 'Conformations will be energy-minimized.'
87 write (iout,'(80(1h*)/)')
! Special debug modes: -2 runs the test driver, -1 the side-chain map check.
91 if (modecalc.eq.-2) then
94 else if (modecalc.eq.-1) then
95 write(iout,*) "call check_sc_map next"
99 !elwrite(iout,*)"!!!!!!!!!!!!!!!!! in unres"
102 if (fg_rank.gt.0) then
103 ! Fine-grain slaves just do energy and gradient components.
104 call ergastulum ! slave workhouse in Latin
! Dispatch on the calculation mode (coarse-grain master/processes only).
107 if (modecalc.eq.0) then
108 call exec_eeval_or_minim
109 else if (modecalc.eq.1) then
111 else if (modecalc.eq.2) then
113 else if (modecalc.eq.3 .or. modecalc .eq.6) then
115 else if (modecalc.eq.4) then
116 call exec_mult_eeval_or_minim
117 else if (modecalc.eq.5) then
119 else if (ModeCalc.eq.7) then
121 else if (ModeCalc.eq.8) then
123 else if (modecalc.eq.11) then
125 else if (modecalc.eq.12) then
127 else if (modecalc.eq.14) then
130 write (iout,'(a)') 'This calculation type is not supported',&
! Normal shutdown: release slave tasks, report timing, finalize MPI,
! then print the total CPU time split into h/min/s and stop.
137 if (fg_rank.eq.0) call finish_task
138 ! call memmon_print_usage()
140 call print_detailed_timing
142 call MPI_Finalize(ierr)
145 call dajczas(tcpu(),hrtime,mintime,sectime)
146 stop '********** Program terminated normally.'
150 !-----------------------------------------------------------------------------
152 !-----------------------------------------------------------------------------
! exec_MD (fragment): driver for a mesoscopic molecular dynamics run
! (modecalc=12).  The visible part performs module setup and reports that
! the MD arrays have been allocated; the MD propagation call itself is on
! lines not shown here — NOTE(review): confirm against the full source.
154 use MPI_data !include 'COMMON.SETUP'
155 use control_data !include 'COMMON.CONTROL'
156 use geometry, only:chainbuild
158 use io_units !include 'COMMON.IOUNITS'
161 ! include 'DIMENSIONS'
167 print *,'After MD alloc'
168 ! if (me.eq.king .or. .not. out1file) &
169 ! write (iout,*) "Calling chainbuild"
173 end subroutine exec_MD
174 !---------------------------------------------------------------------------
! Driver for replica-exchange molecular dynamics (modecalc=14).
! Allocates the MREMD work arrays and announces the REMD run together with
! the replica multiplicity list (remd_mlist) and replica count (nrep).
175 subroutine exec_MREMD
176 use MPI_data !include 'COMMON.SETUP'
177 use control_data !include 'COMMON.CONTROL'
178 use io_units !include 'COMMON.IOUNITS'
180 use REMD_data !include 'COMMON.REMD'
181 use geometry, only:chainbuild
185 ! include 'DIMENSIONS'
192 call alloc_MREMD_arrays
194 ! if (me.eq.king .or. .not. out1file) &
195 ! write (iout,*) "Calling chainbuild"
! Only the master reports when single-file output is requested.
197 if (me.eq.king .or. .not. out1file) &
198 write (iout,*) "Calling REMD",remd_mlist,nrep
208 end subroutine exec_MREMD
209 !-----------------------------------------------------------------------------
! Single-conformation energy evaluation and/or minimization (modecalc=0).
! Builds the chain when no PDB/Cartesian input was given, optionally splits
! the energy into long- and short-range parts (split_ene path), runs either
! the Cartesian (minim_dc) or internal-coordinate (minimize) minimizer, and
! reports energies, secondary structure, RMSD statistics and output files.
210 subroutine exec_eeval_or_minim
211 use MPI_data !include 'COMMON.SETUP'
212 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
213 use io_units !include 'COMMON.IOUNITS'
215 ! use energy !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
216 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
217 ! use REMD !include 'COMMON.REMD'
218 ! use MD !include 'COMMON.MD'
223 use geometry, only:chainbuild
225 use compare, only:alloc_compare_arrays,hairpin,secondary2,rms_nac_nnc
226 use minimm, only:minimize,minim_dc,sc_move
230 ! implicit real*8 (a-h,o-z)
231 ! include 'DIMENSIONS'
236 !el common /srutu/ icall
! energy_ holds the total energy (index 0) and its components (1:n_ene);
! varia is the internal-coordinate variable vector (up to 6 per residue).
237 real(kind=8) :: energy_(0:n_ene)
238 real(kind=8) :: energy_long(0:n_ene),energy_short(0:n_ene)
239 real(kind=8) :: varia(6*nres) !(maxvar) (maxvar=6*maxres)
240 real(kind=8) :: time00, evals, etota, etot, time_ene, time1
241 integer :: nharp,nft_sc,iretcode,nfun
242 integer,dimension(4,nres/3) :: iharp !(4,nres/3)(4,maxres/3)
244 real(kind=8) :: rms,frac,frac_nn,co
247 call alloc_compare_arrays
! No PDB and no Cartesian input: geometry must be built from internal
! coordinates before any energy call.
248 if ((indpdb.eq.0).and.(.not.read_cart)) then
250 write(iout,*) 'Warning: Calling chainbuild'
256 ! write(iout,*)"in exec_eeval or minimim",split_ene
258 ! write(iout,*)"cccccc",j,(c(i,j),i=1,3)
259 ! write(iout,*)"dcccccc",j,(dc(i,j),i=1,3)
262 ! write(iout,*)"in exec_eeval or minimim"
264 print *,"Processor",myrank," after chainbuild"
! Split-energy diagnostics: evaluate and print long-range, short-range,
! and their sum component by component.
267 call etotal_long(energy_long)
268 write (iout,*) "Printing long range energy"
269 call enerprint(energy_long)
271 call etotal_short(energy_short)
272 write (iout,*) "Printing short range energy"
273 call enerprint(energy_short)
275 energy_(i)=energy_long(i)+energy_short(i)
276 write (iout,*) i,energy_long(i),energy_short(i),energy_(i)
278 write (iout,*) "Printing long+short range energy"
279 call enerprint(energy_)
! Wall-clock timing of the full energy evaluation.
284 time_ene=MPI_Wtime()-time00
286 write (iout,*) "Time for energy evaluation",time_ene
287 print *,"after etotal"
290 call enerprint(energy_)
291 call hairpin(.true.,nharp,iharp)
292 call secondary2(.true.)
! Remove steric overlaps of side chains, then relax them with sc_move
! before the main minimization.
296 print *, 'Calling OVERLAP_SC'
297 call overlap_sc(fail)
301 call sc_move(2,nres-1,10,1d10,nft_sc,etot)
302 print *,'SC_move',nft_sc,etot
303 write(iout,*) 'SC_move',nft_sc,etot
! Cartesian-coordinate minimization branch.
307 print *, 'Calling MINIM_DC'
311 ! call check_ecartint !el
312 call minim_dc(etot,iretcode,nfun)
313 ! call check_ecartint !el
! Internal-coordinate minimization branch: rebuild chain from PDB input
! if needed, pack angles into varia, then call SUMSL-based minimize.
315 if (indpdb.ne.0) then
317 write(iout,*) 'Warning: Calling chainbuild'
320 call geom_to_var(nvar,varia)
321 print *,'Calling MINIMIZE.'
326 ! call exec_checkgrad !el
327 call minimize(etot,varia,iretcode,nfun)
329 ! call exec_checkgrad !el
331 print *,'SUMSL return code is',iretcode,' eval ',nfun
! Throughput statistic: energy evaluations per wall-clock second.
333 evals=nfun/(MPI_WTIME()-time1)
335 print *,'# eval/s',evals
336 print *,'refstr=',refstr
337 call hairpin(.true.,nharp,iharp)
338 call secondary2(.true.)
341 call enerprint(energy_)
344 call briefout(0,etot)
! RMSD / native-contact comparison only when a reference structure exists.
345 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
346 write (iout,'(a,i3)') 'SUMSL return code:',iretcode
347 write (iout,'(a,i20)') '# of energy evaluations:',nfun+1
348 write (iout,'(a,f16.3)')'# of energy evaluations/sec:',evals
350 print *,'refstr=',refstr,frac,frac_nn,co
351 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
352 print *,"after rms_nac_ncc"
353 call briefout(0,etot)
! Optional structure output in PDB and/or MOL2 format.
355 if (outpdb) call pdbout(etot,titel(:32),ipdb)
356 if (outmol2) call mol2out(etot,titel(:32))
357 write(iout,*) "after exec_eeval_or_minim"
359 end subroutine exec_eeval_or_minim
360 !-----------------------------------------------------------------------------
! Regularization of a PDB structure (modecalc=1): fit the united-residue
! chain to the reference Cartesian coordinates (cref) for residues nnt..nct,
! then report energy, RMSD statistics and write the regularized structure.
361 subroutine exec_regularize
362 ! use MPI_data !include 'COMMON.SETUP'
363 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
364 use io_units !include 'COMMON.IOUNITS'
366 use energy_data !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
367 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
368 ! use REMD !include 'COMMON.REMD'
369 ! use MD !include 'COMMON.MD'
373 ! implicit real*8 (a-h,o-z)
374 ! include 'DIMENSIONS'
378 real(kind=8) :: energy_(0:n_ene)
380 real(kind=8) :: rms,frac,frac_nn,co
383 call alloc_compare_arrays
387 call regularize(nct-nnt+1,etot,rms,cref(1,nnt,1),iretcode)
! Subtract energy component 14 from the reported total — NOTE(review):
! presumably the restraint term used to drive the fit; confirm the index
! against the force-field component table.
389 energy_(0)=energy_(0)-energy_(14)
391 call enerprint(energy_)
393 call briefout(0,etot)
394 if (outpdb) call pdbout(etot,titel(:32),ipdb)
395 if (outmol2) call mol2out(etot,titel(:32))
396 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
397 write (iout,'(a,i3)') 'SUMSL return code:',iretcode
399 end subroutine exec_regularize
400 !-----------------------------------------------------------------------------
! Threading of a sequence onto PDB template structures (modecalc=2).
! Visible part only allocates the comparison arrays; the threading call
! itself is on lines not shown here.
401 subroutine exec_thread
402 ! use MPI_data !include 'COMMON.SETUP'
405 ! include 'DIMENSIONS'
409 call alloc_compare_arrays
412 end subroutine exec_thread
413 !-----------------------------------------------------------------------------
! exec_MC (fragment): Monte Carlo driver used for modecalc=3 (MCM, Monte
! Carlo with minimization) and modecalc=6 (entropic sampling MC); the two
! if-blocks below select between those variants.
415 ! use MPI_data !include 'COMMON.SETUP'
416 use control_data !include 'COMMON.CONTROL'
421 ! implicit real*8 (a-h,o-z)
422 ! include 'DIMENSIONS'
423 character(len=10) :: nodeinfo
424 real(kind=8) :: varia(6*nres) !(maxvar) (maxvar=6*maxres)
429 call alloc_MCM_arrays
433 if (modecalc.eq.3) then
439 if (modecalc.eq.3) then
449 end subroutine exec_MC
450 !-----------------------------------------------------------------------------
! Energy evaluation / minimization of MANY conformations (modecalc=4).
! Conformations are read one at a time from unit intin (Cartesian via
! read_x, or internal angles via read_angles).  On a single node the loop
! is serial; with several coarse-grain nodes the master farms conformations
! out to workers with mpi_send/mpi_recv (tags idint for integer headers,
! idreal for coordinate/energy payloads) and collects minimized results.
! Per-conformation energy components and optional RMSD statistics are
! written to unit istat.
451 subroutine exec_mult_eeval_or_minim
452 use MPI_data !include 'COMMON.SETUP'
453 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
454 use io_units !include 'COMMON.IOUNITS'
456 use energy_data !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
457 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
458 ! use REMD !include 'COMMON.REMD'
459 ! use MD !include 'COMMON.MD'
461 use geometry, only:chainbuild,geom_to_var,int_from_cart1,var_to_geom
462 use energy, only:etotal,enerprint
463 use compare, only:rms_nac_nnc
464 use minimm, only:minimize!,minim_mcmf
465 ! implicit real*8 (a-h,o-z)
466 ! include 'DIMENSIONS'
468 use minimm, only:minim_mcmf
471 integer :: ierror,ierr
! muster: MPI status array used to identify the worker a result came from.
473 real(kind=8),dimension(mpi_status_size) :: muster
477 real(kind=8) :: varia(6*nres) !(maxvar) (maxvar=6*maxres)
478 integer,dimension(6) :: ind
479 real(kind=8) :: energy_(0:n_ene)
481 real(kind=8) :: etot,ene0
482 integer :: mm,imm,nft,n,iconf,nmin,i,iretcode,nfun,it,&
484 real(kind=8) :: rms,frac,frac_nn,co,time,ene
! --- Serial path: open the input and write the istat column headers
! (energy-term names, plus RMSD/contact columns when refstr is set). ---
494 open(intin,file=intinname,status='old')
495 write (istat,'(a5,20a12)')"# ",&
496 (wname(print_order(i)),i=1,nprint_ene)
498 write (istat,'(a5,20a12)')"# ",&
499 (ename(print_order(i)),i=1,nprint_ene),&
500 "ETOT total","RMSD","nat.contact","nnt.contact"
502 write (istat,'(a5,20a12)')"# ",&
503 (ename(print_order(i)),i=1,nprint_ene),"ETOT total"
! Read next conformation: Cartesian record (time, energy, coordinates) ...
509 read (intin,'(e15.10,e15.5)',end=1100,err=1100) time,ene
510 call read_x(intin,*11)
512 ! Broadcast the order to compute internal coordinates to the slaves.
514 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
516 call int_from_cart1(.false.)
! ... or internal-coordinate record (conformation number + angles).
518 read (intin,'(i5)',end=1100,err=1100) iconf
519 call read_angles(intin,*11)
520 call geom_to_var(nvar,varia)
521 write(iout,*) 'Warning: Calling chainbuild1'
524 write (iout,'(a,i7)') 'Conformation #',iconf
526 call briefout(iconf,energy_(0))
527 call enerprint(energy_)
530 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
531 write (istat,'(i5,20(f12.3))') iconf,&
532 (energy_(print_order(i)),i=1,nprint_ene),etot,&
536 write (istat,'(i5,16(f12.3))') iconf,&
537 (energy_(print_order(i)),i=1,nprint_ene),etot
! --- Parallel path, initial fill: while idle workers remain (mm<nodes),
! read a conformation and send it to worker mm. ---
553 if (mm.lt.nodes) then
555 read (intin,'(e15.10,e15.5)',end=11,err=11) time,ene
556 call read_x(intin,*11)
558 ! Broadcast the order to compute internal coordinates to the slaves.
560 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
562 call int_from_cart1(.false.)
564 read (intin,'(i5)',end=11,err=11) iconf
565 call read_angles(intin,*11)
566 call geom_to_var(nvar,varia)
567 write(iout,*) 'Warning: Calling chainbuild2'
570 write (iout,'(a,i7)') 'Conformation #',iconf
! Ship the task: integer header, variable vector, starting energy.
580 call mpi_send(ind,6,mpi_integer,mm,idint,CG_COMM,&
582 call mpi_send(varia,nvar,mpi_double_precision,mm,&
584 call mpi_send(ene0,1,mpi_double_precision,mm,&
586 ! print *,'task ',n,' sent to worker ',mm,nvar
! --- Steady state: receive a finished result from ANY worker ... ---
588 call mpi_recv(ind,6,mpi_integer,mpi_any_source,idint,&
590 man=muster(mpi_source)
591 ! print *,'receiving result from worker ',man,' (',iii1,iii,')'
592 call mpi_recv(varia,nvar,mpi_double_precision,&
593 man,idreal,CG_COMM,muster,ierr)
594 call mpi_recv(ene,1,&
595 mpi_double_precision,man,idreal,&
597 call mpi_recv(ene0,1,&
598 mpi_double_precision,man,idreal,&
600 ! print *,'result received from worker ',man,' sending now'
! Rebuild the geometry from the returned variables and report it.
602 call var_to_geom(nvar,varia)
603 write(iout,*) 'Warning: Calling chainbuild3'
609 write (iout,'(a,2i7)') 'Conformation #',iconf,ind(5)
612 call enerprint(energy_)
613 call briefout(it,etot)
614 ! if (minim) call briefout(it,etot)
616 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
617 write (istat,'(i5,19(f12.3))') iconf,&
618 (energy_(print_order(i)),i=1,nprint_ene),etot,&
621 write (istat,'(i5,15(f12.3))') iconf,&
622 (energy_(print_order(i)),i=1,nprint_ene),etot
! ... then read the next conformation and hand it to the worker that
! just finished (man).
627 read (intin,'(e15.10,e15.5)',end=1101,err=1101) time,ene
628 call read_x(intin,*11)
630 ! Broadcast the order to compute internal coordinates to the slaves.
632 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
634 call int_from_cart1(.false.)
636 read (intin,'(i5)',end=1101,err=1101) iconf
637 call read_angles(intin,*11)
638 call geom_to_var(nvar,varia)
639 write(iout,*) 'Warning: Calling chainbuild4'
650 call mpi_send(ind,6,mpi_integer,man,idint,CG_COMM,&
652 call mpi_send(varia,nvar,mpi_double_precision,man,&
654 call mpi_send(ene0,1,mpi_double_precision,man,&
656 nf_mcmf=nf_mcmf+ind(4)
! --- Drain phase: no more input; collect the outstanding results. ---
662 call mpi_recv(ind,6,mpi_integer,mpi_any_source,idint,&
664 man=muster(mpi_source)
665 call mpi_recv(varia,nvar,mpi_double_precision,&
666 man,idreal,CG_COMM,muster,ierr)
667 call mpi_recv(ene,1,&
668 mpi_double_precision,man,idreal,&
670 call mpi_recv(ene0,1,&
671 mpi_double_precision,man,idreal,&
674 call var_to_geom(nvar,varia)
675 write(iout,*) 'Warning: Calling chainbuild5'
681 write (iout,'(a,2i7)') 'Conformation #',iconf,ind(5)
684 call enerprint(energy_)
685 call briefout(it,etot)
687 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
688 write (istat,'(i5,19(f12.3))') iconf,&
689 (energy_(print_order(i)),i=1,nprint_ene),etot,&
692 write (istat,'(i5,15(f12.3))') iconf,&
693 (energy_(print_order(i)),i=1,nprint_ene),etot
! Tell every worker to stop (termination message on tag idint).
705 call mpi_send(ind,6,mpi_integer,i,idint,CG_COMM,&
! --- Alternative serial loop: re-open the input, print headers (with the
! energy-term weights), and evaluate/minimize each conformation locally. ---
710 open(intin,file=intinname,status='old')
711 write (istat,'(a5,20a12)')"# ",&
712 (wname(print_order(i)),i=1,nprint_ene)
713 write (istat,'("# ",20(1pe12.4))') &
714 (weights(print_order(i)),i=1,nprint_ene)
716 write (istat,'(a5,20a12)')"# ",&
717 (ename(print_order(i)),i=1,nprint_ene),&
718 "ETOT total","RMSD","nat.contact","nnt.contact"
720 write (istat,'(a5,14a12)')"# ",&
721 (ename(print_order(i)),i=1,nprint_ene),"ETOT total"
725 read (intin,'(e15.10,e15.5)',end=11,err=11) time,ene
726 call read_x(intin,*11)
728 ! Broadcast the order to compute internal coordinates to the slaves.
730 call MPI_Bcast(6,1,MPI_INTEGER,king,FG_COMM,IERROR)
732 call int_from_cart1(.false.)
734 read (intin,'(i5)',end=11,err=11) iconf
735 call read_angles(intin,*11)
736 call geom_to_var(nvar,varia)
737 write(iout,*) 'Warning: Calling chainbuild5'
740 write (iout,'(a,i7)') 'Conformation #',iconf
741 if (minim) call minimize(etot,varia,iretcode,nfun)
745 call enerprint(energy_)
746 if (minim) call briefout(it,etot)
748 call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
749 write (istat,'(i5,18(f12.3))') iconf,&
750 (energy_(print_order(i)),i=1,nprint_ene),&
751 etot,rms,frac,frac_nn,co
754 write (istat,'(i5,14(f12.3))') iconf,&
755 (energy_(print_order(i)),i=1,nprint_ene),etot
761 end subroutine exec_mult_eeval_or_minim
762 !-----------------------------------------------------------------------------
! Numerical check of the analytical energy gradient (modecalc=5).
! Evaluates the energy once, then branches on icheckgrad (1..3) to the
! appropriate checking routine via a legacy computed GO TO.
763 subroutine exec_checkgrad
764 ! use MPI_data !include 'COMMON.SETUP'
765 use control_data !include 'COMMON.CONTROL''COMMON.TIME1''COMMON.NAMES''COMMON.HEADER'
766 use io_units !include 'COMMON.IOUNITS'
767 !el use energy_data, only:icall !include 'COMMON.INTERACT''COMMON.CONTACTS''COMMON.VAR''COMMON.FFIELD' 'COMMON.SBRIDGE'
768 use geometry_data !include 'COMMON.GEO''COMMON.CHAIN'
769 ! use REMD !include 'COMMON.REMD'
770 use MD_data !include 'COMMON.MD'
771 use io_base, only:intout
772 use io_config, only:read_fragments
777 ! implicit real*8 (a-h,o-z)
778 ! include 'DIMENSIONS'
783 !el common /srutu/ icall
784 real(kind=8) :: energy_(0:max_ene)
! Commented-out perturbation code: random tweaks of virtual-bond lengths
! and dc vectors, kept for manual gradient experiments.
788 ! vbld(i)=vbld(i)+ran_number(-0.1d0,0.1d0)
789 ! if (itype(i).ne.10)
790 ! & vbld(i+nres)=vbld(i+nres)+ran_number(-0.001d0,0.001d0)
792 if (indpdb.eq.0) then
793 write(iout,*) 'Warning: Calling chainbuild'
798 ! dc(j,i)=dc(j,i)+ran_number(-0.2d0,0.2d0)
802 ! if (itype(i).ne.10) then
804 ! dc(j,i+nres)=dc(j,i+nres)+ran_number(-0.2d0,0.2d0)
809 ! dc(j,0)=ran_number(-0.2d0,0.2d0)
819 call etotal(energy_(0))
821 call enerprint(energy_(0))
822 write (iout,*) "Uconst",Uconst," Uconst_back",uconst_back
823 print *,'icheckgrad=',icheckgrad
! Computed GO TO: icheckgrad=1 -> Cartesian-internal check (label 10),
! 2 -> Cartesian gradient check (label 20), 3 -> label 30 (not visible here).
824 goto (10,20,30) icheckgrad
825 10 call check_ecartint
827 20 call check_cartgrad
831 end subroutine exec_checkgrad
832 !-----------------------------------------------------------------------------
! exec_map (fragment): energy-map calculation (modecalc=7).  The visible
! part reads the map setup and allocates the map arrays.
836 use io_config, only:map_read
839 call alloc_map_arrays
843 end subroutine exec_map
844 !-----------------------------------------------------------------------------
! exec_CSA (fragment): Conformational Space Annealing (modecalc=8).
! CSA requires a parallel build; a serial run only prints the warning below.
847 use io_units !include 'COMMON.IOUNITS'
853 ! include 'DIMENSIONS'
854 ! Conformational Space Annealling programmed by Jooyoung Lee.
855 ! This method works only with parallel machines!
857 call alloc_CSA_arrays
860 write (iout,*) "CSA works on parallel machines only"
863 end subroutine exec_CSA
864 !-----------------------------------------------------------------------------
! Soft regularization of a PDB structure (modecalc=11): builds the chain,
! recognizes (or reuses) the secondary structure, and reports energy,
! structure output and RMSD statistics.
865 subroutine exec_softreg
866 use io_units !include 'COMMON.IOUNITS'
867 use control_data !include 'COMMON.CONTROL'
869 use io_base, only:intout,briefout
870 use geometry, only:chainbuild
874 ! include 'DIMENSIONS'
875 real(kind=8) :: energy_(0:n_ene)
877 real(kind=8) :: rms,frac,frac_nn,co
880 call alloc_compare_arrays
881 write(iout,*) 'Warning: Calling chainbuild'
884 call enerprint(energy_)
! Use secondary structure from the PDB file when supplied; otherwise
! detect it here.
885 if (.not.lsecondary) then
886 write(iout,*) 'Calling secondary structure recognition'
887 call secondary2(debug)
889 write(iout,*) 'Using secondary structure supplied in pdb'
896 call enerprint(energy_)
898 call briefout(0,etot)
899 call secondary2(.true.)
900 if (refstr) call rms_nac_nnc(rms,frac,frac_nn,co,.true.)
902 end subroutine exec_softreg
903 !-----------------------------------------------------------------------------
905 !-----------------------------------------------------------------------------
! Work loop for fine-grain MPI slaves ("ergastulum" = slave workhouse).
! Each iteration receives an integer order code from the master via
! MPI_Bcast and executes the corresponding task; a negative order ends the
! loop.  Visible order codes: 2 = short-range energy, 3 = long-range
! energy, 4 = Ginv multiply, 5 = MD setup (dimensions + scatter of the
! inverse inertia matrix), 6 = internal-from-Cartesian, 7 = Cartesian
! chainbuild, 9 = friction-matrix multiply.  Codes 0, 1, 8 and 10 have
! their call lines elided here — NOTE(review): confirm against full source.
907 subroutine ergastulum
909 ! implicit real*8 (a-h,o-z)
910 ! include 'DIMENSIONS'
913 use MDyn, only:setup_fricmat
914 use REMD, only:fricmat_mult,ginv_mult
918 ! include 'COMMON.SETUP'
919 ! include 'COMMON.DERIV'
920 ! include 'COMMON.VAR'
921 ! include 'COMMON.IOUNITS'
922 ! include 'COMMON.FFIELD'
923 ! include 'COMMON.INTERACT'
924 ! include 'COMMON.MD'
925 ! include 'COMMON.TIME1'
926 real(kind=8),dimension(6*nres) :: z,d_a_tmp !(maxres6) maxres6=6*maxres
927 real(kind=8) :: edum(0:n_ene),time_order(0:10)
928 !el real(kind=8),dimension(2*nres,2*nres) :: Gcopy !(maxres2,maxres2) maxres2=2*maxres
929 !el common /przechowalnia/ Gcopy
933 real(kind=8) :: time00
934 integer :: iorder,i,j,nres2,ierr,ierror
! One-time allocation of the matrices and per-task bookkeeping arrays
! used by the MD setup and scatter operations below.
937 if(.not.allocated(Gcopy)) allocate(Gcopy(nres2,nres2))
939 if(.not.allocated(Ginv)) allocate(Ginv(nres2,nres2)) !(maxres2,maxres2)
942 if(.not.allocated(ng_start)) allocate(ng_start(0:nfgtasks-1))
943 if(.not.allocated(ng_counts)) allocate(ng_counts(0:nfgtasks-1))
944 if(.not.allocated(nginv_counts)) allocate(nginv_counts(0:nfgtasks-1)) !(0:MaxProcs-1)
945 if(.not.allocated(nginv_start)) allocate(nginv_start(0:nfgtasks)) !(0:MaxProcs)
947 if(.not.allocated(fricmat)) allocate(fricmat(nres2,nres2)) !maxres2=2*maxres
949 ! Workers wait for variables and NF, and NFL from the boss
951 do while (iorder.ge.0)
952 ! write (*,*) 'Processor',fg_rank,' CG group',kolor,
953 ! & ' receives order from Master'
! Block until the master broadcasts the next order; accumulate the time
! spent waiting (per order code, once warmed up past the first calls).
955 call MPI_Bcast(iorder,1,MPI_INTEGER,king,FG_COMM,IERR)
956 time_Bcast=time_Bcast+MPI_Wtime()-time00
957 if (icall.gt.4 .and. iorder.ge.0) &
958 time_order(iorder)=time_order(iorder)+MPI_Wtime()-time00
961 ! & 'Processor',fg_rank,' completed receive MPI_BCAST order',iorder
962 if (iorder.eq.0) then
965 ! write (2,*) "After etotal"
966 ! write (2,*) "dimen",dimen," dimen3",dimen3
968 else if (iorder.eq.2) then
970 call etotal_short(edum)
971 ! write (2,*) "After etotal_short"
972 ! write (2,*) "dimen",dimen," dimen3",dimen3
974 else if (iorder.eq.3) then
976 call etotal_long(edum)
977 ! write (2,*) "After etotal_long"
978 ! write (2,*) "dimen",dimen," dimen3",dimen3
980 else if (iorder.eq.1) then
982 ! write (2,*) "After sum_gradient"
983 ! write (2,*) "dimen",dimen," dimen3",dimen3
985 else if (iorder.eq.4) then
986 call ginv_mult(z,d_a_tmp)
987 else if (iorder.eq.5) then
988 ! Setup MD things for a slave
! dimen: number of coordinate blocks (backbone + side chains).
989 dimen=(nct-nnt+1)+nside
990 dimen1=(nct-nnt)+(nct-nnt+1)
992 ! write (2,*) "dimen",dimen," dimen3",dimen3
! Partition the G-matrix work among fine-grain tasks and exchange each
! task's start offset and count (in units of 3 coordinates).
994 call int_bounds(dimen,igmult_start,igmult_end)
995 igmult_start=igmult_start-1
996 call MPI_Allgather(3*igmult_start,1,MPI_INTEGER,&
997 ng_start(0),1,MPI_INTEGER,FG_COMM,IERROR)
998 my_ng_count=igmult_end-igmult_start
999 call MPI_Allgather(3*my_ng_count,1,MPI_INTEGER,ng_counts(0),1,&
1000 MPI_INTEGER,FG_COMM,IERROR)
1001 write (2,*) "ng_start",(ng_start(i),i=0,nfgtasks-1) !sp
1002 ! write (2,*) "ng_counts",(ng_counts(i),i=0,nfgtasks-1)
1003 myginv_ng_count=nres2*my_ng_count !el maxres2
1004 ! write (2,*) "igmult_start",igmult_start," igmult_end",
1005 ! & igmult_end," my_ng_count",my_ng_count
1007 call MPI_Allgather(nres2*igmult_start,1,MPI_INTEGER,& !el maxres2
1008 nginv_start(0),1,MPI_INTEGER,FG_COMM,IERROR)
1009 call MPI_Allgather(myginv_ng_count,1,MPI_INTEGER,&
1010 nginv_counts(0),1,MPI_INTEGER,FG_COMM,IERROR)
1011 ! write (2,*) "nginv_start",(nginv_start(i),i=0,nfgtasks-1)
1012 ! write (2,*) "nginv_counts",(nginv_counts(i),i=0,nfgtasks-1)
1014 ! call MPI_Barrier(FG_COMM,IERROR)
! Scatter this task's slice of the inverse inertia matrix Ginv into
! Gcopy, then transpose it back into Ginv locally.
1016 call MPI_Scatterv(ginv(1,1),nginv_counts(0),&
1017 nginv_start(0),MPI_DOUBLE_PRECISION,gcopy(1,1),&
1018 myginv_ng_count,MPI_DOUBLE_PRECISION,king,FG_COMM,IERR)
1020 time_scatter_ginv=time_scatter_ginv+MPI_Wtime()-time00
1023 do j=1,2*my_ng_count
1024 ginv(j,i)=gcopy(i,j)
1027 ! write (2,*) "dimen",dimen," dimen3",dimen3
1028 ! write (2,*) "End MD setup"
1030 ! write (iout,*) "My chunk of ginv_block"
1031 ! call MATOUT2(my_ng_count,dimen3,maxres2,maxers2,ginv_block)
1032 else if (iorder.eq.6) then
1033 call int_from_cart1(.false.)
1034 else if (iorder.eq.7) then
1035 call chainbuild_cart
1036 else if (iorder.eq.8) then
1038 else if (iorder.eq.9) then
1039 call fricmat_mult(z,d_a_tmp)
1040 else if (iorder.eq.10) then
! Loop exited (negative order): report per-order wait times and leave.
1044 write (*,*) 'Processor',fg_rank,' CG group',kolor,&
1045 ' absolute rank',myrank,' leves ERGASTULUM.'
1046 write(*,*)'Processor',fg_rank,' wait times for respective orders',&
1047 (' order[',i,']',time_order(i),i=0,10)
1049 end subroutine ergastulum