! ftmain.f90
program ftmain
!-----------------------------------------------------------------------
!                                                    since  '94/07/31
!                                                    update '09/09/14
!
!   Electronic structure calculation
!   with the plane-wave method for solving the Kohn-Sham equation,
!   and the finite-difference method for solving the Poisson equation.
!
!   The external potential is calculated from pseudopotentials.
!
!   make pwp    for the serial calculation
!   make pwpmpi for MPI
!-----------------------------------------------------------------------
implicit none
include 'mpif.h'
integer :: world     ! global, default communicator
integer :: nodes     ! the number of nodes in communicator world
integer :: myid      ! node ID in communicator world
integer :: world_qm  ! communicator for the QM nodes
integer :: ierr

!-----Start the parallel environment and keep my node ID
call MPI_INIT(ierr)
world = MPI_COMM_WORLD
call MPI_COMM_RANK( world, myid, ierr )
call MPI_COMM_SIZE( world, nodes, ierr )

if( myid == 0 ) then
    write(*,*) 'Starting--nodes = ', nodes
end if

call get_newcomm( myid, nodes, world, world_qm )

!-----internode synchronization
call MPI_BARRIER( world, ierr )

!-----release communicator
call MPI_COMM_FREE( world_qm, ierr )

!-----Finalize the parallel environment
call MPI_FINALIZE(ierr)

stop
end program ftmain
subroutine get_newcomm( myid, nodes, world, world_qm )
!-----------------------------------------------------------------------
!   define and create new communicators
!-----------------------------------------------------------------------
implicit none
include 'mpif.h'
integer :: myid, nodes, world, world_qm

!-----declare local variables
integer :: group     ! group of communicator world
!integer, allocatable, dimension(:) :: extract_ids  ! node IDs to be extracted
integer, allocatable, dimension(:) :: include_ids   ! node IDs to be included
integer :: iworld    ! temporary communicator
integer :: igroup    ! temporary group
!integer :: nextract
integer :: i, nmd, ii
integer :: status
integer :: ierr

!-----extract the group from the communicator
call MPI_COMM_GROUP( world, group, ierr )

!-----allocate memory for include_ids
allocate( include_ids(0:nodes-1), stat=status )

!-----create a new single-node communicator for each QM node
!nextract = nodes - 1
do nmd = 0, nodes - 1
    call MPI_BARRIER( world, ierr )
    if( myid == 0 ) write(*,*) 'check_newcomm do-start', nmd

    !-----collect the node IDs to be included (here only node nmd)
    ii = - 1
    do i = nmd, nmd
        ii = ii + 1
        include_ids(ii) = i
    end do

    !-----create a new group by including the selected nodes
!    call MPI_GROUP_EXCL( group, nextract, extract_ids(0), igroup, ierr )
    call MPI_GROUP_INCL( group, 1, include_ids(0), igroup, ierr )
    if( ierr /= MPI_SUCCESS ) then
        write(*,*) 'group_incl: ierr =', ierr
        stop
    end if

    !-----create a new communicator from the new group
    !     (MPI_COMM_CREATE is collective over world; nodes outside
    !      the group receive MPI_COMM_NULL)
    call MPI_COMM_CREATE( world, igroup, iworld, ierr )
    if( ierr /= MPI_SUCCESS ) then
        write(*,*) 'comm_create: ierr =', ierr
        stop
    end if

    !-----release the group
    call MPI_GROUP_FREE( igroup, ierr )

    !-----keep the communicator that contains my own node
    if( myid >= nmd .and. myid <= nmd ) then
        world_qm = iworld
    end if

    call MPI_BARRIER( world, ierr )
    if( myid == 0 ) write(*,*) 'check_newcomm do-end'
end do

!-----release the group
call MPI_GROUP_FREE( group, ierr )

!-----deallocate memory for include_ids
deallocate( include_ids, stat=status )

return
end subroutine get_newcomm
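!-----------------------------------------------------------------------
!   Illustrative sketch, not part of the original code: the loop in
!   get_newcomm builds one single-node communicator per node by group
!   inclusion.  The same per-node split of `world` could also be
!   obtained with MPI_COMM_SPLIT, using each node's own ID as the
!   color.  The subroutine name get_newcomm_split below is a
!   hypothetical example and is not called by this program.
!-----------------------------------------------------------------------
subroutine get_newcomm_split( myid, world, world_qm )
implicit none
include 'mpif.h'
integer :: myid, world, world_qm
integer :: ierr

!-----color = myid places each node in its own communicator;
!     key = 0 keeps the default rank ordering within each color
call MPI_COMM_SPLIT( world, myid, 0, world_qm, ierr )
if( ierr /= MPI_SUCCESS ) then
    write(*,*) 'comm_split: ierr =', ierr
    stop
end if

return
end subroutine get_newcomm_split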