Unverified Commit 55e48bd8 authored by Graham, Aaron's avatar Graham, Aaron Committed by GitHub
Browse files

Adds gatherv for rank-1 SNK and SLK integers (#336)

* Adds gatherv for rank-1 SNK and SLK integers

* Fix INTENT(IN)/INTENT(INOUT) issues
parent ad0eb974
......@@ -209,6 +209,12 @@ TYPE,EXTENDS(ParEnvType) :: MPI_EnvType
gather_SLK1_MPI_Env_type, &
gather_str1D_MPI_ENV_type, &
gather_str2D_MPI_ENV_type
!> @copybrief ParallelEnv::gatherv_SNK1_MPI_Env_type
!> @copydetails ParallelEnv::gatherv_SNK1_MPI_Env_type
PROCEDURE,PASS,PRIVATE :: gatherv_SNK1_MPI_Env_type
!> @copybrief ParallelEnv::gatherv_SLK1_MPI_Env_type
!> @copydetails ParallelEnv::gatherv_SLK1_MPI_Env_type
PROCEDURE,PASS,PRIVATE :: gatherv_SLK1_MPI_Env_type
!> @copybrief ParallelEnv::gatherv_SSK1_MPI_Env_type
!> @copydetails ParallelEnv::gatherv_SSK1_MPI_Env_type
PROCEDURE,PASS,PRIVATE :: gatherv_SSK1_MPI_Env_type
......@@ -216,7 +222,8 @@ TYPE,EXTENDS(ParEnvType) :: MPI_EnvType
!> @copydetails ParallelEnv::gatherv_SDK1_MPI_Env_type
PROCEDURE,PASS,PRIVATE :: gatherv_SDK1_MPI_Env_type
!> Generic interface for MPI_gatherv
GENERIC :: gatherv => gatherv_SSK1_MPI_Env_type, &
GENERIC :: gatherv => gatherv_SNK1_MPI_Env_type, &
gatherv_SLK1_MPI_Env_type,gatherv_SSK1_MPI_Env_type, &
gatherv_SDK1_MPI_Env_type
!> @copybrief ParallelEnv::scatter_SLK0_MPI_Env_type
!> @copydetails ParallelEnv::scatter_SLK0_MPI_Env_type
......@@ -1242,6 +1249,100 @@ SUBROUTINE gather_str2D_MPI_ENV_type(myPE,sendbuf,root)
ENDSUBROUTINE gather_str2D_MPI_ENV_type
!
!-------------------------------------------------------------------------------
!> @brief Gathers variable-length rank-1 SNK arrays from every process onto one
!> @param this the MPI environment
!> @param sendbuf the rank-1 array of data contributed by this process
!> @param recvbuf on the root process, the data from all processes concatenated
!>        in rank order; returned unallocated on every other process
!> @param recvcounts on the root process, the number of elements contributed by
!>        each process; returned unallocated on every other process
!> @param root the process to gather all the data to; optional, defaults to 0
!>
SUBROUTINE gatherv_SNK1_MPI_Env_type(this,sendbuf,recvbuf,recvcounts,root)
  CLASS(MPI_EnvType),INTENT(IN) :: this
  INTEGER(SNK),INTENT(IN) :: sendbuf(:)
  INTEGER(SNK),INTENT(OUT),ALLOCATABLE :: recvbuf(:)
  INTEGER(SIK),INTENT(OUT),ALLOCATABLE :: recvcounts(:)
  INTEGER(SIK),INTENT(IN),OPTIONAL :: root
  !
  INTEGER(SIK) :: rootRank,ip,offset
  INTEGER(SIK),ALLOCATABLE :: displs(:)

  !The gather target defaults to process 0 unless the caller overrides it
  rootRank=0
  IF(PRESENT(root)) rootRank=root
  REQUIRE(rootRank >= 0)
  REQUIRE(rootRank < this%nproc)

  !Collect the per-process contribution sizes
  ALLOCATE(recvcounts(this%nproc))
  CALL this%gather(SIZE(sendbuf),recvcounts,rootRank)

  !Element displacements into recvbuf; per the MPI standard these are only
  !significant on the root, so they are only computed there
  ALLOCATE(displs(this%nproc))
  IF(this%rank == rootRank) THEN
    offset=0
    DO ip=1,this%nproc
      displs(ip)=offset
      offset=offset+recvcounts(ip)
    ENDDO !ip
  ENDIF

  !NOTE(review): SUM(recvcounts) is evaluated on every rank, which assumes the
  !preceding gather defines recvcounts on non-root ranks too -- confirm
  ALLOCATE(recvbuf(SUM(recvcounts)))
#ifdef HAVE_MPI
  CALL MPI_gatherV(sendbuf,SIZE(sendbuf),MPI_INTEGER4,recvbuf,recvcounts, &
      displs,MPI_INTEGER4,rootRank,this%comm,mpierr)
#else
  recvbuf=sendbuf
#endif

  !Non-root processes return both output arguments unallocated
  IF(this%rank /= rootRank) THEN
    DEALLOCATE(recvbuf)
    DEALLOCATE(recvcounts)
  ENDIF
ENDSUBROUTINE gatherv_SNK1_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Gathers variable-length rank-1 SLK arrays from every process onto one
!> @param this the MPI environment
!> @param sendbuf the rank-1 array of data contributed by this process
!> @param recvbuf on the root process, the data from all processes concatenated
!>        in rank order; returned unallocated on every other process
!> @param recvcounts on the root process, the number of elements contributed by
!>        each process; returned unallocated on every other process
!> @param root the process to gather all the data to; optional, defaults to 0
!>
SUBROUTINE gatherv_SLK1_MPI_Env_type(this,sendbuf,recvbuf,recvcounts,root)
  CLASS(MPI_EnvType),INTENT(IN) :: this
  INTEGER(SLK),INTENT(IN) :: sendbuf(:)
  INTEGER(SLK),INTENT(OUT),ALLOCATABLE :: recvbuf(:)
  INTEGER(SIK),INTENT(OUT),ALLOCATABLE :: recvcounts(:)
  INTEGER(SIK),INTENT(IN),OPTIONAL :: root
  !
  INTEGER(SIK) :: rootRank,ip,offset
  INTEGER(SIK),ALLOCATABLE :: displs(:)

  !The gather target defaults to process 0 unless the caller overrides it
  rootRank=0
  IF(PRESENT(root)) rootRank=root
  REQUIRE(rootRank >= 0)
  REQUIRE(rootRank < this%nproc)

  !Collect the per-process contribution sizes
  ALLOCATE(recvcounts(this%nproc))
  CALL this%gather(SIZE(sendbuf),recvcounts,rootRank)

  !Element displacements into recvbuf; per the MPI standard these are only
  !significant on the root, so they are only computed there
  ALLOCATE(displs(this%nproc))
  IF(this%rank == rootRank) THEN
    offset=0
    DO ip=1,this%nproc
      displs(ip)=offset
      offset=offset+recvcounts(ip)
    ENDDO !ip
  ENDIF

  !NOTE(review): SUM(recvcounts) is evaluated on every rank, which assumes the
  !preceding gather defines recvcounts on non-root ranks too -- confirm
  ALLOCATE(recvbuf(SUM(recvcounts)))
#ifdef HAVE_MPI
  CALL MPI_gatherV(sendbuf,SIZE(sendbuf),MPI_INTEGER8,recvbuf,recvcounts, &
      displs,MPI_INTEGER8,rootRank,this%comm,mpierr)
#else
  recvbuf=sendbuf
#endif

  !Non-root processes return both output arguments unallocated
  IF(this%rank /= rootRank) THEN
    DEALLOCATE(recvbuf)
    DEALLOCATE(recvcounts)
  ENDIF
ENDSUBROUTINE gatherv_SLK1_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine for MPI_gatherv for 1D array of SRK reals
!> @param this the MPI environment
!> @param sendbuf the data to be sent by this process
......@@ -1405,7 +1506,7 @@ ENDSUBROUTINE scatter_SLK1_MPI_Env_type
!>
SUBROUTINE bcast_SNK0_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
INTEGER(SNK),INTENT(IN) :: buf
INTEGER(SNK),INTENT(INOUT) :: buf
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1427,7 +1528,7 @@ ENDSUBROUTINE bcast_SNK0_MPI_Env_type
!>
SUBROUTINE bcast_SNK1_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
INTEGER(SNK),INTENT(IN) :: buf(:)
INTEGER(SNK),INTENT(INOUT) :: buf(:)
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1449,7 +1550,7 @@ ENDSUBROUTINE bcast_SNK1_MPI_Env_type
!>
SUBROUTINE bcast_SLK0_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
INTEGER(SLK),INTENT(IN) :: buf
INTEGER(SLK),INTENT(INOUT) :: buf
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1471,7 +1572,7 @@ ENDSUBROUTINE bcast_SLK0_MPI_Env_type
!>
SUBROUTINE bcast_SLK1_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
INTEGER(SLK),INTENT(IN) :: buf(:)
INTEGER(SLK),INTENT(INOUT) :: buf(:)
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1493,7 +1594,7 @@ ENDSUBROUTINE bcast_SLK1_MPI_Env_type
!>
SUBROUTINE bcast_SSK0_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
REAL(SSK),INTENT(IN) :: buf
REAL(SSK),INTENT(INOUT) :: buf
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1515,7 +1616,7 @@ ENDSUBROUTINE bcast_SSK0_MPI_Env_type
!>
SUBROUTINE bcast_SSK1_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
REAL(SSK),INTENT(IN) :: buf(:)
REAL(SSK),INTENT(INOUT) :: buf(:)
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1537,7 +1638,7 @@ ENDSUBROUTINE bcast_SSK1_MPI_Env_type
!>
SUBROUTINE bcast_SSK2_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
REAL(SSK),INTENT(IN) :: buf(:,:)
REAL(SSK),INTENT(INOUT) :: buf(:,:)
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1559,7 +1660,7 @@ ENDSUBROUTINE bcast_SSK2_MPI_Env_type
!>
SUBROUTINE bcast_SDK0_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
REAL(SDK),INTENT(IN) :: buf
REAL(SDK),INTENT(INOUT) :: buf
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1581,7 +1682,7 @@ ENDSUBROUTINE bcast_SDK0_MPI_Env_type
!>
SUBROUTINE bcast_SDK1_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
REAL(SDK),INTENT(IN) :: buf(:)
REAL(SDK),INTENT(INOUT) :: buf(:)
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1603,7 +1704,7 @@ ENDSUBROUTINE bcast_SDK1_MPI_Env_type
!>
SUBROUTINE bcast_SDK2_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
REAL(SDK),INTENT(IN) :: buf(:,:)
REAL(SDK),INTENT(INOUT) :: buf(:,:)
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1625,7 +1726,7 @@ ENDSUBROUTINE bcast_SDK2_MPI_Env_type
!>
SUBROUTINE bcast_SSK3_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
REAL(SSK),INTENT(IN) :: buf(:,:,:)
REAL(SSK),INTENT(INOUT) :: buf(:,:,:)
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1647,7 +1748,7 @@ ENDSUBROUTINE bcast_SSK3_MPI_Env_type
!>
SUBROUTINE bcast_SDK3_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
REAL(SDK),INTENT(IN) :: buf(:,:,:)
REAL(SDK),INTENT(INOUT) :: buf(:,:,:)
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1669,7 +1770,7 @@ ENDSUBROUTINE bcast_SDK3_MPI_Env_type
!>
SUBROUTINE bcast_SSK4_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
REAL(SSK),INTENT(IN) :: buf(:,:,:,:)
REAL(SSK),INTENT(INOUT) :: buf(:,:,:,:)
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......@@ -1691,7 +1792,7 @@ ENDSUBROUTINE bcast_SSK4_MPI_Env_type
!>
SUBROUTINE bcast_SDK4_MPI_Env_type(myPE,buf,root)
CLASS(MPI_EnvType),INTENT(IN) :: myPE
REAL(SDK),INTENT(IN) :: buf(:,:,:,:)
REAL(SDK),INTENT(INOUT) :: buf(:,:,:,:)
INTEGER(SIK),INTENT(IN),OPTIONAL :: root
INTEGER(SIK) :: rank
rank=0
......
......@@ -119,7 +119,8 @@ SUBROUTINE testMPIEnv()
REAL(SDK),ALLOCATABLE :: sendSDK1(:),recvSDK1(:)
INTEGER(SIK),ALLOCATABLE :: ranks_SIK(:),ranks2_SIK(:,:)
INTEGER(SIK),ALLOCATABLE :: testIDX(:),testWGT(:),recvcounts(:)
INTEGER(SLK),ALLOCATABLE :: ranks(:),ranks2(:,:)
INTEGER(SNK),ALLOCATABLE :: sendSNK1(:),recvSNK1(:)
INTEGER(SLK),ALLOCATABLE :: ranks(:),ranks2(:,:),sendSLK1(:),recvSLK1(:)
LOGICAL(SBK) :: bool,bool1d(10),bool2d(2,5),bool3d(2,5,2),bool4d(2,5,2,5)
TYPE(MPI_EnvType) :: testMPI,testMPI2
CHARACTER(LEN=8) :: tmpChar
......@@ -349,6 +350,109 @@ SUBROUTINE testMPIEnv()
DEALLOCATE(recvcounts)
ENDIF
!Test gatherv for rank-1 SLK arrays.
!NOTE(review): the parallel branch's expected values (sizes and counts) hold
!only when the harness runs with exactly 2 processes -- confirm
IF(testMPI%nproc > 1) THEN
!Ranks contribute different-sized arrays: rank 0 sends 3 elements, others 2
IF(testMPI%rank == 0) THEN
sendSLK1 = [1_SLK, 2_SLK, 3_SLK]
ELSE
sendSLK1 = [4_SLK, 5_SLK]
ENDIF
!Gather to the default root (process 0)
CALL testMPI%gatherv(sendSLK1,recvSLK1,recvcounts)
IF(testMPI%rank == 0) THEN
!Root receives the concatenation in rank order plus the per-rank counts
ASSERT_EQ(SIZE(recvSLK1),5,'SIZE receive')
ASSERT_EQ(recvSLK1(1),1_SLK,'receive(1)')
ASSERT_EQ(recvSLK1(2),2_SLK,'receive(2)')
ASSERT_EQ(recvSLK1(3),3_SLK,'receive(3)')
ASSERT_EQ(recvSLK1(4),4_SLK,'receive(4)')
ASSERT_EQ(recvSLK1(5),5_SLK,'receive(5)')
ASSERT_EQ(recvcounts(1),3,'recvcounts(1)')
ASSERT_EQ(recvcounts(2),2,'recvcounts(2)')
DEALLOCATE(recvSLK1)
DEALLOCATE(recvcounts)
ELSE
!Non-root ranks must get both output arguments back unallocated
ASSERT(.NOT.ALLOCATED(recvSLK1),'non-root receive')
ASSERT(.NOT.ALLOCATED(recvcounts),'non-root recvcounts')
ENDIF
!Repeat with an explicit non-default root (process 1)
CALL testMPI%gatherv(sendSLK1,recvSLK1,recvcounts,1)
IF(testMPI%rank == 1) THEN
ASSERT_EQ(SIZE(recvSLK1),5,'SIZE receive')
ASSERT_EQ(recvSLK1(1),1_SLK,'receive(1)')
ASSERT_EQ(recvSLK1(2),2_SLK,'receive(2)')
ASSERT_EQ(recvSLK1(3),3_SLK,'receive(3)')
ASSERT_EQ(recvSLK1(4),4_SLK,'receive(4)')
ASSERT_EQ(recvSLK1(5),5_SLK,'receive(5)')
ASSERT_EQ(recvcounts(1),3,'recvcounts(1)')
ASSERT_EQ(recvcounts(2),2,'recvcounts(2)')
DEALLOCATE(recvSLK1)
DEALLOCATE(recvcounts)
ELSE
ASSERT(.NOT.ALLOCATED(recvSLK1),'non-root receive')
ASSERT(.NOT.ALLOCATED(recvcounts),'non-root recvcounts')
ENDIF
ELSE
!Serial case: the single process gathers its own data back to itself
sendSLK1 = [1_SLK, 2_SLK, 3_SLK, 4_SLK, 5_SLK]
CALL testMPI%gatherv(sendSLK1,recvSLK1,recvcounts)
ASSERT_EQ(SIZE(recvSLK1),5,'SIZE receive')
ASSERT_EQ(recvSLK1(1),1_SLK,'receive(1)')
ASSERT_EQ(recvSLK1(2),2_SLK,'receive(2)')
ASSERT_EQ(recvSLK1(3),3_SLK,'receive(3)')
ASSERT_EQ(recvSLK1(4),4_SLK,'receive(4)')
ASSERT_EQ(recvSLK1(5),5_SLK,'receive(5)')
ASSERT_EQ(recvcounts(1),5,'recvcounts(1)')
DEALLOCATE(recvSLK1)
DEALLOCATE(recvcounts)
ENDIF
!Test gatherv for rank-1 SNK arrays (mirrors the SLK test above in structure).
!NOTE(review): the parallel branch's expected values (sizes and counts) hold
!only when the harness runs with exactly 2 processes -- confirm
IF(testMPI%nproc > 1) THEN
!Ranks contribute different-sized arrays: rank 0 sends 3 elements, others 2
IF(testMPI%rank == 0) THEN
sendSNK1 = [1_SNK, 2_SNK, 3_SNK]
ELSE
sendSNK1 = [4_SNK, 5_SNK]
ENDIF
!Gather to the default root (process 0)
CALL testMPI%gatherv(sendSNK1,recvSNK1,recvcounts)
IF(testMPI%rank == 0) THEN
!Root receives the concatenation in rank order plus the per-rank counts
ASSERT_EQ(SIZE(recvSNK1),5,'SIZE receive')
ASSERT_EQ(recvSNK1(1),1_SNK,'receive(1)')
ASSERT_EQ(recvSNK1(2),2_SNK,'receive(2)')
ASSERT_EQ(recvSNK1(3),3_SNK,'receive(3)')
ASSERT_EQ(recvSNK1(4),4_SNK,'receive(4)')
ASSERT_EQ(recvSNK1(5),5_SNK,'receive(5)')
ASSERT_EQ(recvcounts(1),3,'recvcounts(1)')
ASSERT_EQ(recvcounts(2),2,'recvcounts(2)')
DEALLOCATE(recvSNK1)
DEALLOCATE(recvcounts)
ELSE
!Non-root ranks must get both output arguments back unallocated
ASSERT(.NOT.ALLOCATED(recvSNK1),'non-root receive')
ASSERT(.NOT.ALLOCATED(recvcounts),'non-root recvcounts')
ENDIF
!Repeat with an explicit non-default root (process 1)
CALL testMPI%gatherv(sendSNK1,recvSNK1,recvcounts,1)
IF(testMPI%rank == 1) THEN
ASSERT_EQ(SIZE(recvSNK1),5,'SIZE receive')
ASSERT_EQ(recvSNK1(1),1_SNK,'receive(1)')
ASSERT_EQ(recvSNK1(2),2_SNK,'receive(2)')
ASSERT_EQ(recvSNK1(3),3_SNK,'receive(3)')
ASSERT_EQ(recvSNK1(4),4_SNK,'receive(4)')
ASSERT_EQ(recvSNK1(5),5_SNK,'receive(5)')
ASSERT_EQ(recvcounts(1),3,'recvcounts(1)')
ASSERT_EQ(recvcounts(2),2,'recvcounts(2)')
DEALLOCATE(recvSNK1)
DEALLOCATE(recvcounts)
ELSE
ASSERT(.NOT.ALLOCATED(recvSNK1),'non-root receive')
ASSERT(.NOT.ALLOCATED(recvcounts),'non-root recvcounts')
ENDIF
ELSE
!Serial case: the single process gathers its own data back to itself
sendSNK1 = [1_SNK, 2_SNK, 3_SNK, 4_SNK, 5_SNK]
CALL testMPI%gatherv(sendSNK1,recvSNK1,recvcounts)
ASSERT_EQ(SIZE(recvSNK1),5,'SIZE receive')
ASSERT_EQ(recvSNK1(1),1_SNK,'receive(1)')
ASSERT_EQ(recvSNK1(2),2_SNK,'receive(2)')
ASSERT_EQ(recvSNK1(3),3_SNK,'receive(3)')
ASSERT_EQ(recvSNK1(4),4_SNK,'receive(4)')
ASSERT_EQ(recvSNK1(5),5_SNK,'receive(5)')
ASSERT_EQ(recvcounts(1),5,'recvcounts(1)')
DEALLOCATE(recvSNK1)
DEALLOCATE(recvcounts)
ENDIF
IF(testMPI%nproc > 1) THEN
IF(testMPI%rank == 0) THEN
sendSDK1 = [1.0_SDK, 2.0_SDK, 3.0_SDK]
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment