Commit d034732f authored by aarograh, committed by Henderson, Shane

Adds interfaces for explicitly sending rank 2 and rank 3 int/real/str

Merge branch 'more_parallel_interfaces' into 'master'

* Add getRow method for Petsc matrix

* Add more send/recv/gather interfaces

* Fix incorrect myName value

* Fix unused variables

* Add rmdir test

* Implement rmdir on HDF5 file type

Adds interfaces for explicitly sending rank 2 and rank 3 int/real/str
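
For illustration, a minimal usage sketch of the new rank-2/rank-3 generic `send`/`recv` bindings (hypothetical variable names; `parEnv` is assumed to be an initialized `MPI_EnvType` running on at least two processes):

```fortran
! Hypothetical illustration of the generic bindings added in this MR;
! buffer shapes and the destination/source ranks are made up.
REAL(SRK) :: buf2(10,5)
INTEGER(SIK) :: ibuf3(4,3,2)

IF(parEnv%rank == 0) THEN
  buf2=1.0_SRK
  ibuf3=7
  CALL parEnv%send(buf2,SIZE(buf2),1)    ! rank-2 real send to proc 1
  CALL parEnv%send(ibuf3,SIZE(ibuf3),1)  ! rank-3 integer send to proc 1
ELSEIF(parEnv%rank == 1) THEN
  CALL parEnv%recv(buf2,SIZE(buf2),0)    ! rank-2 real receive from proc 0
  CALL parEnv%recv(ibuf3,SIZE(ibuf3),0)  ! rank-3 integer receive from proc 0
ENDIF
```

As with the existing rank-0/rank-1 interfaces, an optional message tag can be passed as a final argument.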


**Developer Checklist:**
- [x] Have you done a self-review after creating the merge request?
- [x] Have you filled in the Merge Request information (title, description) thoroughly?
- [x] Have you updated the relevant tickets (if this MR is linked to any VERA-dev tickets)?
- [x] Have you addressed all suggested feedback and commented on it to let the reviewer know? (Do not resolve discussions that the reviewer started)

**Reviewer Checklist:**
- [x] Have you confirmed all discussions were adequately addressed and resolved them all?
- [x] Does it conform to formatting guidelines?
- [x] Are there adequate and clear comments?
- [x] Is the design clean and sensible?
- [x] Are the changes optimal/efficient?
- [x] Were sufficient DBC checks added?
- [x] Are there unit tests? (if necessary)
- [ ] Is the MR description clear, including a link to the VERA-Dev issue if appropriate?

**PSM Checklist**
- [x] Have you confirmed that all discussions were addressed, or that follow-on issues have been created for them?
- [x] Have you confirmed sufficient testing was conducted?
- [x] Does this impact other repositories?
- [x] Does the MR have an adequate description?
- [x] If the MR has multiple commits, did you set the MR to squash merge?

See merge request https://code.ornl.gov/futility/Futility/-/merge_requests/412
parent 8e57d5c1
+1 −1
@@ -1107,7 +1107,7 @@ RECURSIVE SUBROUTINE rmdir_HDF5FileType(thisHDF5File,path)
  CLASS(HDF5FileType),INTENT(INOUT) :: thisHDF5File
  CHARACTER(LEN=*),INTENT(IN) :: path
#ifdef FUTILITY_HAVE_HDF5
  CHARACTER(LEN=*),PARAMETER :: myName='mkdir_HDF5FileType'
  CHARACTER(LEN=*),PARAMETER :: myName='rmdir_HDF5FileType'
  TYPE(StringType) :: path2
  LOGICAL :: dset_exists
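
The hunk above only corrects the `myName` constant. For context, a hedged sketch of how the new removal capability might be exercised (assuming the routine is exposed as a type-bound `rmdir`, per the commit message; the file object and path are illustrative):

```fortran
! Illustrative only: "h5" is assumed to be an HDF5FileType that has already
! been initialized and opened for writing; the group path is made up.
CALL h5%rmdir('/results/oldGroup')
```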

+24 −0
@@ -9,6 +9,8 @@
!> @brief Base abstraction for the MatrixTypes
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++!
MODULE MatrixTypes_Base
#include "Futility_DBC.h"
USE Futility_DBC
USE IntrType
USE ParameterLists
USE ExceptionHandler
@@ -61,6 +63,8 @@ TYPE,ABSTRACT :: MatrixType
    !> Deferred routine for getting a matrix value
    PROCEDURE(matrix_get_sub_absintfc),DEFERRED,PASS :: get
    !> Routine for getting a full row of matrix values
    PROCEDURE,PASS :: getRow
    !> Deferred routine for transposing the matrix
    PROCEDURE(matrix_transpose_sub_absintfc),DEFERRED,PASS :: transpose
ENDTYPE MatrixType
!
@@ -213,6 +217,26 @@ CHARACTER(LEN=*),PARAMETER :: modName='MATRIXTYPES_BASE'
CONTAINS
!
!-------------------------------------------------------------------------------
!> @brief Get a row of the matrix
!> @param this The matrix object
!> @param row The row to get
!> @param rowval The values in the row
!>
SUBROUTINE getRow(this, row, rowval)
  CLASS(MatrixType),INTENT(INOUT) :: this
  INTEGER(SIK),INTENT(IN) :: row
  REAL(SRK),INTENT(OUT) :: rowval(:)
  !
  INTEGER(SIK) :: i

  REQUIRE(SIZE(rowval) == this%n)
  DO i = 1, this%n
    CALL this%get(row, i, rowval(i))
  ENDDO !i

ENDSUBROUTINE getRow
!
!-------------------------------------------------------------------------------
!> @brief Subroutine that sets up the default parameter lists for all of the
!>        MatrixTypes including Sparse, Tri-Diagonal, Dense Rectangular, Dense
!>        Square, and PETSc.
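
As a brief illustration, a hedged sketch of calling the new default `getRow` from client code (hypothetical names; `mat` is assumed to be an initialized matrix type):

```fortran
! Illustrative use of the default getRow, which builds a dense row through
! repeated calls to get(); the matrix object and row index are made up.
REAL(SRK),ALLOCATABLE :: rowval(:)
ALLOCATE(rowval(mat%n))
CALL mat%getRow(3,rowval)  ! dense copy of row 3, including zero entries
```

Extended types can override this binding with something more efficient, as the PETSc matrix does in the next file.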
+37 −0
@@ -70,6 +70,9 @@ TYPE,EXTENDS(DistributedMatrixType) :: PETScMatrixType
    !> @copybrief MatrixTypes::get_PETScMatrixType
    !> @copydetails MatrixTypes::get_PETScMatrixType
    PROCEDURE,PASS :: get => get_PETScMatrixType
    !> @copybrief MatrixTypes::getRow_PETScMatrixType
    !> @copydetails MatrixTypes::getRow_PETScMatrixType
    PROCEDURE,PASS :: getRow => getRow_PETScMatrixType
    !> @copybrief MatrixTypes::assemble_PETScMatrixType
    !> @copydetails MatrixTypes::assemble_PETScMatrixType
    PROCEDURE,PASS :: assemble => assemble_PETScMatrixType
@@ -291,6 +294,40 @@ SUBROUTINE get_PETScMatrixType(matrix,i,j,getval)
ENDSUBROUTINE get_PETScMatrixtype
!
!-------------------------------------------------------------------------------
!> @brief Gets the values in a PETSc matrix row - presently untested
!> @param this the matrix type to act on
!> @param row the row index in the matrix
!> @param rowval the resulting row values
!>
!> This routine gets a row of the sparse matrix.
!>
SUBROUTINE getRow_PETScMatrixType(this,row,rowval)
  CLASS(PETScMatrixType),INTENT(INOUT) :: this
  INTEGER(SIK),INTENT(IN) :: row
  REAL(SRK),INTENT(OUT) :: rowval(:)
  !
  PetscErrorCode  :: ierr
  INTEGER(SIK) :: nnz
  INTEGER(SIK) :: cols(this%n)
  REAL(SRK) :: vals(this%n)

  rowval=0.0_SRK
  IF(this%isInit) THEN
    ! assemble matrix if necessary
    IF (.NOT.(this%isAssembled)) CALL this%assemble()

    IF((row <= this%n) .AND. (SIZE(rowval) == this%n)) THEN
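      ! PETSc indices are 0-based and MatGetRow/MatRestoreRow return only the
      ! nonzero entries of the row, so they are scattered into the dense,
      ! 1-based rowval below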
      CALL MatGetRow(this%a,row-1,nnz,cols,vals,ierr)
      rowval(cols(1:nnz)+1)=vals(1:nnz)
      CALL MatRestoreRow(this%a,row-1,nnz,cols,vals,ierr)
    ELSE
      CALL eMatrixType%raiseError('Incorrect call to '// &
          modName//'::getRow_PETScMatrixType - row index or rowval size incorrect.')
    ENDIF
  ENDIF
ENDSUBROUTINE getRow_PETScMatrixtype
!
!-------------------------------------------------------------------------------
SUBROUTINE assemble_PETScMatrixType(thisMatrix,ierr)
  CLASS(PETScMatrixType),INTENT(INOUT) :: thisMatrix
  INTEGER(SIK),INTENT(OUT),OPTIONAL :: ierr
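
Because `getRow_PETScMatrixType` is documented as presently untested, here is a hedged sketch of a consistency check that could be added to the matrix unit tests (hypothetical; `pmat` is assumed to be an initialized and assembled `PETScMatrixType`):

```fortran
! Hypothetical check: the dense row returned by getRow should match
! element-wise calls to get() on the same matrix.
REAL(SRK),ALLOCATABLE :: rowval(:)
REAL(SRK) :: refval
INTEGER(SIK) :: j

ALLOCATE(rowval(pmat%n))
CALL pmat%getRow(2,rowval)
DO j=1,pmat%n
  CALL pmat%get(2,j,refval)
  ASSERT_APPROXEQ(rowval(j),refval,'getRow vs get')
ENDDO
```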
+302 −17
@@ -170,11 +170,27 @@ TYPE,EXTENDS(ParEnvType) :: MPI_EnvType
    !> @copybrief ParallelEnv::recv_INT1_MPI_Env_type
    !> @copydetails ParallelEnv::recv_INT1_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: recv_INT1_MPI_Env_type
    !> @copybrief ParallelEnv::recv_REAL2_MPI_Env_type
    !> @copydetails ParallelEnv::recv_REAL2_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: recv_REAL2_MPI_Env_type
    !> @copybrief ParallelEnv::recv_INT2_MPI_Env_type
    !> @copydetails ParallelEnv::recv_INT2_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: recv_INT2_MPI_Env_type
    !> @copybrief ParallelEnv::recv_REAL3_MPI_Env_type
    !> @copydetails ParallelEnv::recv_REAL3_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: recv_REAL3_MPI_Env_type
    !> @copybrief ParallelEnv::recv_INT3_MPI_Env_type
    !> @copydetails ParallelEnv::recv_INT3_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: recv_INT3_MPI_Env_type
    GENERIC :: recv => recv_CHAR_MPI_Env_type, &
        recv_REAL_MPI_Env_type, &
        recv_INT_MPI_Env_type, &
        recv_REAL1_MPI_Env_type, &
        recv_INT1_MPI_Env_type
        recv_INT1_MPI_Env_type, &
        recv_REAL2_MPI_Env_type, &
        recv_INT2_MPI_Env_type, &
        recv_REAL3_MPI_Env_type, &
        recv_INT3_MPI_Env_type

    !> @copybrief ParallelEnv::send_CHAR_MPI_Env_type
    !> @copydetails ParallelEnv::send_CHAR_MPI_Env_type
@@ -191,12 +207,31 @@ TYPE,EXTENDS(ParEnvType) :: MPI_EnvType
    !> @copybrief ParallelEnv::send_INT1_MPI_Env_type
    !> @copydetails ParallelEnv::send_INT1_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: send_INT1_MPI_Env_type
    !> @copybrief ParallelEnv::send_REAL2_MPI_Env_type
    !> @copydetails ParallelEnv::send_REAL2_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: send_REAL2_MPI_Env_type
    !> @copybrief ParallelEnv::send_INT2_MPI_Env_type
    !> @copydetails ParallelEnv::send_INT2_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: send_INT2_MPI_Env_type
    !> @copybrief ParallelEnv::send_REAL3_MPI_Env_type
    !> @copydetails ParallelEnv::send_REAL3_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: send_REAL3_MPI_Env_type
    !> @copybrief ParallelEnv::send_INT3_MPI_Env_type
    !> @copydetails ParallelEnv::send_INT3_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: send_INT3_MPI_Env_type
    GENERIC :: send => send_CHAR_MPI_Env_type, &
        send_REAL_MPI_Env_type, &
        send_INT_MPI_Env_type, &
        send_REAL1_MPI_Env_type, &
        send_INT1_MPI_Env_type

        send_INT1_MPI_Env_type, &
        send_REAL2_MPI_Env_type, &
        send_INT2_MPI_Env_type, &
        send_REAL3_MPI_Env_type, &
        send_INT3_MPI_Env_type

    !> @copybrief ParallelEnv::gather_SBK0_MPI_Env_type
    !> @copydetails ParallelEnv::gather_SBK0_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: gather_SBK0_MPI_Env_type
    !> @copybrief ParallelEnv::gather_SNK0_MPI_Env_type
    !> @copydetails ParallelEnv::gather_SNK0_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: gather_SNK0_MPI_Env_type
@@ -221,19 +256,19 @@ TYPE,EXTENDS(ParEnvType) :: MPI_EnvType
    !> @copybrief ParallelEnv::gather_SDK1_MPI_Env_type
    !> @copydetails ParallelEnv::gather_SDK1_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: gather_SDK1_MPI_Env_type
    !> @copybrief ParallelEnv::gather_str1D_MPI_ENV_type
    !> @copydetails ParallelEnv::gather_str1D_MPI_ENV_type
    PROCEDURE,PASS,PRIVATE :: gather_str1D_MPI_ENV_type
    !> @copybrief ParallelEnv::gather_str2D_MPI_ENV_type
    !> @copydetails ParallelEnv::gather_str2D_MPI_ENV_type
    PROCEDURE,PASS,PRIVATE :: gather_str2D_MPI_ENV_type
    !>
    GENERIC :: gather => gather_SNK0_MPI_Env_type,gather_SNK1_MPI_Env_type, &
        gather_SLK0_MPI_Env_type,gather_SLK1_MPI_Env_type, &
        gather_SSK0_MPI_Env_type,gather_SSK1_MPI_Env_type, &
        gather_SDK0_MPI_Env_type,gather_SDK1_MPI_Env_type, &
        gather_str1D_MPI_ENV_type, &
        gather_str2D_MPI_ENV_type
    !> @copybrief ParallelEnv::gather_str1D_MPI_Env_type
    !> @copydetails ParallelEnv::gather_str1D_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: gather_str1D_MPI_Env_type
    !> @copybrief ParallelEnv::gather_str2D_MPI_Env_type
    !> @copydetails ParallelEnv::gather_str2D_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: gather_str2D_MPI_Env_type
    !>
    GENERIC :: gather => gather_SBK0_MPI_Env_type, gather_SNK0_MPI_Env_type, &
        gather_SNK1_MPI_Env_type, gather_SLK0_MPI_Env_type, &
        gather_SLK1_MPI_Env_type, gather_SSK0_MPI_Env_type, &
        gather_SSK1_MPI_Env_type, gather_SDK0_MPI_Env_type, &
        gather_SDK1_MPI_Env_type, gather_str1D_MPI_Env_type, &
        gather_str2D_MPI_Env_type
    !> @copybrief ParallelEnv::gatherv_SNK1_MPI_Env_type
    !> @copydetails ParallelEnv::gatherv_SNK1_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: gatherv_SNK1_MPI_Env_type
@@ -831,7 +866,7 @@ ENDSUBROUTINE send_INT_MPI_Env_type
!>
SUBROUTINE send_INT1_MPI_Env_type(myPE,sendbuf,n,destProc,in_tag)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  INTEGER(SIK),INTENT(IN) :: sendbuf(*)
  INTEGER(SIK),INTENT(IN) :: sendbuf(:)
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: destProc
  INTEGER(SIK),INTENT(IN),OPTIONAL :: in_tag
@@ -849,6 +884,60 @@ SUBROUTINE send_INT1_MPI_Env_type(myPE,sendbuf,n,destProc,in_tag)
ENDSUBROUTINE send_INT1_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_Send for rank-2 integer arrays
!> @param myPE parallel environment where the communication originates
!> @param n the number of elements to be sent
!> @param sendbuf the data which is to be sent
!> @param destProc the rank of the receiving proc in myPE
!> @param in_tag message id which can be provided to distinguish messages
!>
SUBROUTINE send_INT2_MPI_Env_type(myPE,sendbuf,n,destProc,in_tag)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  INTEGER(SIK),INTENT(IN) :: sendbuf(:,:)
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: destProc
  INTEGER(SIK),INTENT(IN),OPTIONAL :: in_tag
  !
#ifdef HAVE_MPI
  INTEGER(SIK) :: tag
  tag=1
  IF(PRESENT(in_tag)) tag=in_tag
#ifdef DBLINT
  CALL MPI_send(sendBuf,n,MPI_INTEGER8,destProc,tag,myPE%comm,mpierr)
#else
  CALL MPI_send(sendBuf,n,MPI_INTEGER,destProc,tag,myPE%comm,mpierr)
#endif
#endif
ENDSUBROUTINE send_INT2_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_Send for rank-3 integer arrays
!> @param myPE parallel environment where the communication originates
!> @param n the number of elements to be sent
!> @param sendbuf the data which is to be sent
!> @param destProc the rank of the receiving proc in myPE
!> @param in_tag message id which can be provided to distinguish messages
!>
SUBROUTINE send_INT3_MPI_Env_type(myPE,sendbuf,n,destProc,in_tag)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  INTEGER(SIK),INTENT(IN) :: sendbuf(:,:,:)
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: destProc
  INTEGER(SIK),INTENT(IN),OPTIONAL :: in_tag
  !
#ifdef HAVE_MPI
  INTEGER(SIK) :: tag
  tag=1
  IF(PRESENT(in_tag)) tag=in_tag
#ifdef DBLINT
  CALL MPI_send(sendBuf,n,MPI_INTEGER8,destProc,tag,myPE%comm,mpierr)
#else
  CALL MPI_send(sendBuf,n,MPI_INTEGER,destProc,tag,myPE%comm,mpierr)
#endif
#endif
ENDSUBROUTINE send_INT3_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_Send for reals
!> @param myPE parallel environment where the communication originates
!> @param sendbuf the data which is to be sent
@@ -883,7 +972,7 @@ ENDSUBROUTINE send_REAL_MPI_Env_type
!>
SUBROUTINE send_REAL1_MPI_Env_type(myPE,sendbuf,n,destProc,in_tag)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  REAL(SRK),INTENT(IN) :: sendbuf(*)
  REAL(SRK),INTENT(IN) :: sendbuf(:)
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: destProc
  INTEGER(SIK),INTENT(IN),OPTIONAL :: in_tag
@@ -901,6 +990,60 @@ SUBROUTINE send_REAL1_MPI_Env_type(myPE,sendbuf,n,destProc,in_tag)
ENDSUBROUTINE send_REAL1_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_Send for rank-2 real arrays
!> @param myPE parallel environment where the communication originates
!> @param n the number of elements to be sent
!> @param sendbuf the data which is to be sent
!> @param destProc the rank of the receiving proc in myPE
!> @param in_tag message id which can be provided to distinguish messages
!>
SUBROUTINE send_REAL2_MPI_Env_type(myPE,sendbuf,n,destProc,in_tag)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  REAL(SRK),INTENT(IN) :: sendbuf(:,:)
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: destProc
  INTEGER(SIK),INTENT(IN),OPTIONAL :: in_tag
  !
#ifdef HAVE_MPI
  INTEGER(SIK) :: tag
  tag=1
  IF(PRESENT(in_tag)) tag=in_tag
#ifdef DBL
  CALL MPI_send(sendBuf,n,MPI_DOUBLE_PRECISION,destProc,tag,myPE%comm,mpierr)
#else
  CALL MPI_send(sendBuf,n,MPI_SINGLE_PRECISION,destProc,tag,myPE%comm,mpierr)
#endif
#endif
ENDSUBROUTINE send_REAL2_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_Send for rank-3 real arrays
!> @param myPE parallel environment where the communication originates
!> @param n the number of elements to be sent
!> @param sendbuf the data which is to be sent
!> @param destProc the rank of the receiving proc in myPE
!> @param in_tag message id which can be provided to distinguish messages
!>
SUBROUTINE send_REAL3_MPI_Env_type(myPE,sendbuf,n,destProc,in_tag)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  REAL(SRK),INTENT(IN) :: sendbuf(:,:,:)
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: destProc
  INTEGER(SIK),INTENT(IN),OPTIONAL :: in_tag
  !
#ifdef HAVE_MPI
  INTEGER(SIK) :: tag
  tag=1
  IF(PRESENT(in_tag)) tag=in_tag
#ifdef DBL
  CALL MPI_send(sendBuf,n,MPI_DOUBLE_PRECISION,destProc,tag,myPE%comm,mpierr)
#else
  CALL MPI_send(sendBuf,n,MPI_SINGLE_PRECISION,destProc,tag,myPE%comm,mpierr)
#endif
#endif
ENDSUBROUTINE send_REAL3_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_recv for characters
!> @param myPE parallel environment where the communication originates
!> @param sendbuf the data which is to be sent
@@ -978,6 +1121,62 @@ SUBROUTINE recv_REAL1_MPI_Env_type(myPE,recvbuf,n,srcProc,in_tag)
ENDSUBROUTINE recv_REAL1_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_recv for rank-2 real arrays
!> @param myPE parallel environment where the communication originates
!> @param recvbuf the data array which is to be received
!> @param n the number of elements to be received
!> @param srcProc the rank of the sending proc in myPE
!> @param in_tag message id which can be provided to distinguish messages
!>
SUBROUTINE recv_REAL2_MPI_Env_type(myPE,recvbuf,n,srcProc,in_tag)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  REAL(SRK),INTENT(INOUT) :: recvbuf(:,:)
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: srcProc
  INTEGER(SIK),INTENT(IN),OPTIONAL :: in_tag
  !
#ifdef HAVE_MPI
  INTEGER :: stat(MPI_STATUS_SIZE)
  INTEGER(SIK) :: tag
  tag=1
  IF(PRESENT(in_tag)) tag=in_tag
#ifdef DBL
  CALL MPI_recv(recvBuf,n,MPI_DOUBLE_PRECISION,srcProc,tag,myPE%comm,stat,mpierr)
#else
  CALL MPI_recv(recvBuf,n,MPI_SINGLE_PRECISION,srcProc,tag,myPE%comm,stat,mpierr)
#endif
#endif
ENDSUBROUTINE recv_REAL2_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_recv for rank-3 real arrays
!> @param myPE parallel environment where the communication originates
!> @param recvbuf the data array which is to be received
!> @param n the number of elements to be received
!> @param srcProc the rank of the sending proc in myPE
!> @param in_tag message id which can be provided to distinguish messages
!>
SUBROUTINE recv_REAL3_MPI_Env_type(myPE,recvbuf,n,srcProc,in_tag)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  REAL(SRK),INTENT(INOUT) :: recvbuf(:,:,:)
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: srcProc
  INTEGER(SIK),INTENT(IN),OPTIONAL :: in_tag
  !
#ifdef HAVE_MPI
  INTEGER :: stat(MPI_STATUS_SIZE)
  INTEGER(SIK) :: tag
  tag=1
  IF(PRESENT(in_tag)) tag=in_tag
#ifdef DBL
  CALL MPI_recv(recvBuf,n,MPI_DOUBLE_PRECISION,srcProc,tag,myPE%comm,stat,mpierr)
#else
  CALL MPI_recv(recvBuf,n,MPI_SINGLE_PRECISION,srcProc,tag,myPE%comm,stat,mpierr)
#endif
#endif
ENDSUBROUTINE recv_REAL3_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_recv for integers
!> @param myPE parallel environment where the communication originates
!> @param recvbuf the scalar which is to be sent
@@ -1033,6 +1232,92 @@ SUBROUTINE recv_INT1_MPI_Env_type(myPE,recvbuf,n,srcProc,in_tag)
ENDSUBROUTINE recv_INT1_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_recv for rank-2 integer arrays
!> @param myPE parallel environment where the communication originates
!> @param recvbuf the array which is to be received
!> @param n the number of elements to be received
!> @param srcProc the rank of the sending proc in myPE
!> @param in_tag message id which can be provided to distinguish messages
!>
SUBROUTINE recv_INT2_MPI_Env_type(myPE,recvbuf,n,srcProc,in_tag)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  INTEGER(SIK),INTENT(INOUT) :: recvbuf(:,:)
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: srcProc
  INTEGER(SIK),INTENT(IN),OPTIONAL :: in_tag
  !
#ifdef HAVE_MPI
  INTEGER :: stat(MPI_STATUS_SIZE)
  INTEGER(SIK) :: tag

  tag=1
  IF(PRESENT(in_tag)) tag=in_tag
#ifdef DBLINT
  CALL MPI_recv(recvBuf,n,MPI_INTEGER8,srcProc,tag,myPE%comm,stat,mpierr)
#else
  CALL MPI_recv(recvBuf,n,MPI_INTEGER,srcProc,tag,myPE%comm,stat,mpierr)
#endif
#endif
ENDSUBROUTINE recv_INT2_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_recv for rank-3 integer arrays
!> @param myPE parallel environment where the communication originates
!> @param recvbuf the array which is to be received
!> @param n the number of elements to be received
!> @param srcProc the rank of the sending proc in myPE
!> @param in_tag message id which can be provided to distinguish messages
!>
SUBROUTINE recv_INT3_MPI_Env_type(myPE,recvbuf,n,srcProc,in_tag)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  INTEGER(SIK),INTENT(INOUT) :: recvbuf(:,:,:)
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: srcProc
  INTEGER(SIK),INTENT(IN),OPTIONAL :: in_tag
  !
#ifdef HAVE_MPI
  INTEGER :: stat(MPI_STATUS_SIZE)
  INTEGER(SIK) :: tag

  tag=1
  IF(PRESENT(in_tag)) tag=in_tag
#ifdef DBLINT
  CALL MPI_recv(recvBuf,n,MPI_INTEGER8,srcProc,tag,myPE%comm,stat,mpierr)
#else
  CALL MPI_recv(recvBuf,n,MPI_INTEGER,srcProc,tag,myPE%comm,stat,mpierr)
#endif
#endif
ENDSUBROUTINE recv_INT3_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_Gather for logicals
!> @param myPE parallel environment where the communication originates
!> @param sendbuf the data which is to be sent
!> @param recvbuf the data which is to be received
!> @param root the rank of the root process
!>
SUBROUTINE gather_SBK0_MPI_Env_type(myPE,sendbuf,recvbuf,root)
  CLASS(MPI_EnvType),INTENT(IN) :: myPE
  LOGICAL(SBK),INTENT(IN) :: sendbuf
  LOGICAL(SBK),INTENT(OUT) :: recvbuf(:)
  INTEGER(SIK),INTENT(IN),OPTIONAL :: root
  INTEGER(SIK) :: rank
  rank=0
  IF(PRESENT(root)) rank=root
  REQUIRE(0 <= rank)
  REQUIRE(rank < myPE%nproc)
  IF(rank == myPE%rank) THEN
    REQUIRE(SIZE(recvbuf) == myPE%nproc)
  ENDIF
#ifdef HAVE_MPI
  CALL MPI_Gather(sendbuf,1,MPI_LOGICAL,recvbuf,1,MPI_LOGICAL, &
      rank,myPE%comm,mpierr)
#else
  recvbuf(1)=sendbuf
#endif
ENDSUBROUTINE gather_SBK0_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wrapper routine calls MPI_Gather for integers
!> @param myPE parallel environment where the communication originates
!> @param sendbuf the data which is to be sent
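
A short, hypothetical usage sketch of the newly added logical gather (`parEnv` is assumed to be an initialized `MPI_EnvType`):

```fortran
! Illustrative: gather one logical flag from every rank onto rank 0.
LOGICAL(SBK) :: localConverged
LOGICAL(SBK),ALLOCATABLE :: allConverged(:)

ALLOCATE(allConverged(parEnv%nproc))
localConverged=.TRUE.
CALL parEnv%gather(localConverged,allConverged,0)  ! the root argument is optional and defaults to 0
```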
+21 −0
@@ -115,6 +115,7 @@ SUBROUTINE testMPIEnv()
  INTEGER(SLK) :: sbuf(2)
  INTEGER(SIK) :: sbuf_SIK(2),sbuf0_SIK
  REAL(SRK) :: sbuf_SRK(2),sbuf0_SRK
  REAL(SRK) :: sendSRK2(2,2), recvSRK2(2,2)
  REAL(SSK),ALLOCATABLE :: sendSSK1(:),recvSSK1(:)
  REAL(SDK),ALLOCATABLE :: sendSDK1(:),recvSDK1(:)
  INTEGER(SIK),ALLOCATABLE :: ranks_SIK(:),ranks2_SIK(:,:)
@@ -528,6 +529,16 @@ SUBROUTINE testMPIEnv()
      tag=5
      CALL testMPI%recv(sbuf_SRK,SIZE(sbuf_SRK),1,tag)
      ASSERT(ALL(sbuf_SRK == 0_SIK),'master recv')

      recvSRK2 = RESHAPE(SOURCE=[10.0_SRK, 20.0_SRK, 30.0_SRK, 40.0_SRK],SHAPE=[2,2])
      CALL testMPI%recv(recvSRK2, SIZE(recvSRK2), 1)
      ASSERT_APPROXEQ(recvSRK2(1,1),11.0_SRK,'recvSRK2(1,1)')
      ASSERT_APPROXEQ(recvSRK2(2,1),22.0_SRK,'recvSRK2(2,1)')
      ASSERT_APPROXEQ(recvSRK2(1,2),33.0_SRK,'recvSRK2(1,2)')
      ASSERT_APPROXEQ(recvSRK2(2,2),44.0_SRK,'recvSRK2(2,2)')

      sendSRK2 = RESHAPE(SOURCE=[10.0_SRK, 20.0_SRK, 30.0_SRK, 40.0_SRK],SHAPE=[2,2])
      CALL testMPI%send(sendSRK2, SIZE(sendSRK2), 1)
    ELSEIF(testMPI%rank ==1) THEN
      !Receive as largest possible integers
      tag=1
@@ -550,6 +561,16 @@ SUBROUTINE testMPIEnv()
      sbuf_SRK = 0
      tag=5
      CALL testMPI%send(sbuf_SRK,SIZE(sbuf_SRK),0,tag)

      sendSRK2 = RESHAPE(SOURCE=[11.0_SRK, 22.0_SRK, 33.0_SRK, 44.0_SRK],SHAPE=[2,2])
      CALL testMPI%send(sendSRK2, SIZE(sendSRK2), 0)

      recvSRK2 = sendSRK2
      CALL testMPI%recv(recvSRK2, SIZE(recvSRK2), 0)
      ASSERT_APPROXEQ(recvSRK2(1,1),10.0_SRK,'recvSRK2(1,1)')
      ASSERT_APPROXEQ(recvSRK2(2,1),20.0_SRK,'recvSRK2(2,1)')
      ASSERT_APPROXEQ(recvSRK2(1,2),30.0_SRK,'recvSRK2(1,2)')
      ASSERT_APPROXEQ(recvSRK2(2,2),40.0_SRK,'recvSRK2(2,2)')
    ENDIF
  ENDIF
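
The new test covers the rank-2 real exchange; a hedged sketch of an analogous rank-3 integer exchange that could be added alongside it (hypothetical buffers and values):

```fortran
! Hypothetical extension mirroring the rank-2 real test above.
INTEGER(SIK) :: sendSIK3(2,2,2),recvSIK3(2,2,2)

IF(testMPI%rank == 0) THEN
  recvSIK3=0
  CALL testMPI%recv(recvSIK3,SIZE(recvSIK3),1)
  ASSERT(ALL(recvSIK3 == 5),'rank-3 integer recv')
ELSEIF(testMPI%rank == 1) THEN
  sendSIK3=5
  CALL testMPI%send(sendSIK3,SIZE(sendSIK3),0)
ENDIF
```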