src/MeshTransfer.f90  +2 −2

@@ -1408,7 +1408,7 @@ FUNCTION LPIntegral(coef,a,b,extrap,h,nodeX) RESULT(y)
   IF(PRESENT(nodeX)) xa=a-nodeX
   IF(PRESENT(nodeX)) xb=b-nodeX

-  REQUIRE(ABS(xb-xa) <= w .OR. extrap)
+  REQUIRE((ABS(xb-xa) .APPROXLE. w) .OR. extrap)

   N=SIZE(coef)

@@ -1485,7 +1485,7 @@ FUNCTION ZPIntegral(coef,a,b,extrap,h) RESULT(y)
   w=ONE
   IF(PRESENT(h)) w=h

-  REQUIRE(ABS(b-a) <= w .OR. extrap)
+  REQUIRE((ABS(b-a) .APPROXLE. w) .OR. extrap)

   y=LPIntegral(coef,TWO*(a/w)*(a/w)-ONE,TWO*(b/w)*(b/w)-ONE,extrap)
 ENDFUNCTION ZPIntegral
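
The two REQUIRE changes above replace the exact <= test with the fuzzy .APPROXLE. comparison, presumably so that floating-point roundoff in ABS(xb-xa) no longer trips the assertion when the integration interval equals the node width. As a rough illustration only (not the operator defined in this package), a tolerant less-than-or-equal can be written as a user-defined operator; the module name, real kind, and the 1.0E-12 tolerance below are assumptions:

MODULE ApproxCompareSketch
  !Illustrative sketch of a fuzzy comparison operator; the names and tolerance
  !here are assumptions for this example, not the package's .APPROXLE. definition.
  IMPLICIT NONE
  PRIVATE
  PUBLIC :: OPERATOR(.APPROXLE.)

  INTEGER,PARAMETER :: SRK=SELECTED_REAL_KIND(15)  !double-precision real kind
  REAL(SRK),PARAMETER :: EPS=1.0E-12_SRK           !absolute comparison tolerance

  INTERFACE OPERATOR(.APPROXLE.)
    MODULE PROCEDURE approxle_real
  ENDINTERFACE
CONTAINS
  !> Returns .TRUE. if a is less than or equal to b to within the tolerance EPS
  ELEMENTAL FUNCTION approxle_real(a,b) RESULT(bool)
    REAL(SRK),INTENT(IN) :: a
    REAL(SRK),INTENT(IN) :: b
    LOGICAL :: bool
    bool=(a <= b+EPS)
  ENDFUNCTION approxle_real
ENDMODULE ApproxCompareSketch

With such an operator, ABS(xb-xa) .APPROXLE. w remains true when ABS(xb-xa) exceeds w by no more than the tolerance, which is exactly the borderline case the original strict test rejected.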
src/ParallelEnv.f90  +144 −0

@@ -449,6 +449,16 @@ TYPE,EXTENDS(ParEnvType) :: MPI_EnvType
      scanSum_SIK_MPI_Env_type, &
      scanSum_SRK0_MPI_Env_type, &
      scanSum_SRK_MPI_Env_type
    !> @copybrief ParallelEnv::ExchangeDataI_MPI_Env_type
    !> @copydetails ParallelEnv::ExchangeDataI_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: ExchangeDataI_MPI_Env_type
    !> @copybrief ParallelEnv::ExchangeDataR_MPI_Env_type
    !> @copydetails ParallelEnv::ExchangeDataR_MPI_Env_type
    PROCEDURE,PASS,PRIVATE :: ExchangeDataR_MPI_Env_type
    GENERIC :: exchangeData => ExchangeDataI_MPI_Env_type, ExchangeDataR_MPI_Env_type
    !> @copybrief ParallelEnv::WaitForComm_MPI_Env_type
    !> @copydetails ParallelEnv::WaitForComm_MPI_Env_type
    PROCEDURE,PASS :: WaitForComm => WaitForComm_MPI_Env_type
ENDTYPE MPI_EnvType

!> Type describes basic information about OpenMP environment

@@ -3394,5 +3404,139 @@ SUBROUTINE assign_ParEnvType(pe1,pe2)
  ENDIF
ENDSUBROUTINE assign_ParEnvType
!
!-------------------------------------------------------------------------------
!> @brief Wraps the MPI calls for a matched non-blocking send/receive of real data
!> @param this the MPI environment to operate on
!> @param n the number of real values being exchanged
!> @param SendBuf the vector of real data being sent
!> @param RecvBuf the vector to hold the data that is received
!> @param TargetRank the rank being sent to and received from
!> @param SendTag the message tag used for the send
!> @param RecvTag the message tag expected on the receive
!> @param SendReq the request handle returned for the non-blocking send
!> @param RecvReq the request handle returned for the non-blocking receive
!>
SUBROUTINE ExchangeDataR_MPI_Env_type(this,n,SendBuf,RecvBuf,TargetRank,SendTag,RecvTag,SendReq,RecvReq)
  CLASS(MPI_EnvType),INTENT(IN) :: this
  INTEGER(SIK),INTENT(IN) :: n
  REAL(SRK),INTENT(IN) :: SendBuf(*)
  REAL(SRK),INTENT(OUT) :: RecvBuf(*)
  INTEGER(SIK),INTENT(IN) :: TargetRank
  INTEGER(SIK),INTENT(IN) :: SendTag
  INTEGER(SIK),INTENT(IN) :: RecvTag
  INTEGER(SIK),INTENT(OUT) :: SendReq
  INTEGER(SIK),INTENT(OUT) :: RecvReq
#ifdef HAVE_MPI
  CHARACTER(LEN=*),PARAMETER :: myName='ExchangeDataR_MPI_Env_type'
#ifdef DBL
  INTEGER(SIK),PARAMETER :: MPI_SRK_SIZE=MPI_DOUBLE_PRECISION
#else
  INTEGER(SIK),PARAMETER :: MPI_SRK_SIZE=MPI_REAL
#endif
  INTEGER(SIK) :: mpierr

  IF(TargetRank >= 0) THEN
    !Initiate non-blocking send
    CALL MPI_Isend(SendBuf,n,MPI_SRK_SIZE,TargetRank,SendTag,this%comm,SendReq,mpierr)
    IF(mpierr /= MPI_SUCCESS) THEN
      CALL eParEnv%raiseFatalError(modName//'::'//myName// &
          ' - Error with MPI_Isend trying to send data!')
    ENDIF
    !Initiate non-blocking receive
    CALL MPI_Irecv(RecvBuf,n,MPI_SRK_SIZE,TargetRank,RecvTag,this%comm,RecvReq,mpierr)
    IF(mpierr /= MPI_SUCCESS) THEN
      CALL eParEnv%raiseFatalError(modName//'::'//myName// &
          ' - Error with MPI_Irecv trying to receive data!')
    ENDIF
  ENDIF
#else
  SendReq=0
  RecvReq=0
  RecvBuf(1:n)=SendBuf(1:n)
#endif
ENDSUBROUTINE ExchangeDataR_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Wraps the MPI calls for a matched non-blocking send/receive of integer data
!> @param this the MPI environment to operate on
!> @param n the number of integer values being exchanged
!> @param SendBuf the vector of data being sent
!> @param RecvBuf the vector to hold the data that is received
!> @param TargetRank the rank being sent to and received from
!> @param SendTag the message tag used for the send
!> @param RecvTag the message tag expected on the receive
!> @param SendReq the request handle returned for the non-blocking send
!> @param RecvReq the request handle returned for the non-blocking receive
!>
SUBROUTINE ExchangeDataI_MPI_Env_type(this,n,SendBuf,RecvBuf,TargetRank,SendTag,RecvTag,SendReq,RecvReq)
  CLASS(MPI_EnvType),INTENT(IN) :: this
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: SendBuf(n)
  INTEGER(SIK),INTENT(OUT) :: RecvBuf(n)
  INTEGER(SIK),INTENT(IN) :: TargetRank
  INTEGER(SIK),INTENT(IN) :: SendTag
  INTEGER(SIK),INTENT(IN) :: RecvTag
  INTEGER(SIK),INTENT(OUT) :: SendReq
  INTEGER(SIK),INTENT(OUT) :: RecvReq
#ifdef HAVE_MPI
  CHARACTER(LEN=*),PARAMETER :: myName='ExchangeDataI_MPI_Env_type'
  INTEGER(SIK) :: mpierr

  IF(TargetRank >= 0) THEN
    !Initiate non-blocking send
    CALL MPI_Isend(SendBuf,n,MPI_INTEGER,TargetRank,SendTag,this%comm,SendReq,mpierr)
    IF(mpierr /= MPI_SUCCESS) THEN
      CALL eParEnv%raiseFatalError(modName//'::'//myName// &
          ' - Error with MPI_Isend trying to send data!')
    ENDIF
    !Initiate non-blocking receive
    CALL MPI_Irecv(RecvBuf,n,MPI_INTEGER,TargetRank,RecvTag,this%comm,RecvReq,mpierr)
    IF(mpierr /= MPI_SUCCESS) THEN
      CALL eParEnv%raiseFatalError(modName//'::'//myName// &
          ' - Error with MPI_Irecv trying to receive data!')
    ENDIF
  ENDIF
#else
  SendReq=0
  RecvReq=0
  RecvBuf(1:n)=SendBuf(1:n)
#endif
ENDSUBROUTINE ExchangeDataI_MPI_Env_type
!
!-------------------------------------------------------------------------------
!> @brief Waits until the parallel boundary data passing is finished
!> @param this the MPI environment to operate on
!> @param n the number of requests to wait on in each request array
!> @param RecvRequests receive requests from neighboring processors
!> @param SendRequests send requests to neighboring processors
!>
!> Waits until all send and receive requests have completed.
!>
SUBROUTINE WaitForComm_MPI_Env_type(this,n,RecvRequests,SendRequests)
  CLASS(MPI_EnvType),INTENT(IN) :: this
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: RecvRequests(*)
  INTEGER(SIK),INTENT(IN) :: SendRequests(*)
#ifdef HAVE_MPI
  CHARACTER(LEN=*),PARAMETER :: myName='WaitForComm'
  INTEGER(SIK) :: mpierr

  CALL MPI_Waitall(n,SendRequests,MPI_STATUSES_IGNORE,mpierr)
  CALL MPI_Waitall(n,RecvRequests,MPI_STATUSES_IGNORE,mpierr)
  IF(mpierr /= MPI_SUCCESS) THEN
    CALL eParEnv%raiseFatalError(modName//'::'//myName// &
        ' - Error with MPI_Waitall while waiting on boundary data communication!')
  ENDIF
#endif
ENDSUBROUTINE WaitForComm_MPI_Env_type
!
ENDMODULE ParallelEnv
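
Taken together, exchangeData posts the matched MPI_Isend/MPI_Irecv pair for one neighbor and WaitForComm blocks until all of the accumulated requests have completed. The following is a minimal usage sketch only, not code from this change: the subroutine name, the neighbor ranks, the tag values, and the assumption that the SIK/SRK kinds come from the package's IntrType module are all illustrative.

SUBROUTINE exchangeBoundary(mpiEnv,n,leftNeighbor,rightNeighbor,sendLeft,sendRight,recvLeft,recvRight)
  !Illustrative only: exchange n reals with a left and a right neighbor rank,
  !then wait for all four outstanding requests.  Assumes both neighbors exist
  !(TargetRank >= 0) and that mpiEnv was already initialized elsewhere.
  USE IntrType     !assumed source of the SIK/SRK kind parameters
  USE ParallelEnv
  IMPLICIT NONE
  TYPE(MPI_EnvType),INTENT(IN) :: mpiEnv
  INTEGER(SIK),INTENT(IN) :: n
  INTEGER(SIK),INTENT(IN) :: leftNeighbor,rightNeighbor
  REAL(SRK),INTENT(IN) :: sendLeft(n),sendRight(n)
  REAL(SRK),INTENT(OUT) :: recvLeft(n),recvRight(n)
  INTEGER(SIK) :: sendReq(2),recvReq(2)

  !Post the non-blocking send/receive pair for each neighbor; the generic binding
  !resolves to ExchangeDataR_MPI_Env_type because the buffers are REAL(SRK).
  !Tags are chosen so each send matches the receive posted on the other side.
  CALL mpiEnv%exchangeData(n,sendLeft,recvLeft,leftNeighbor,1_SIK,2_SIK, &
      sendReq(1),recvReq(1))
  CALL mpiEnv%exchangeData(n,sendRight,recvRight,rightNeighbor,2_SIK,1_SIK, &
      sendReq(2),recvReq(2))

  !Block until both sends and both receives have completed
  CALL mpiEnv%WaitForComm(2_SIK,recvReq,sendReq)
ENDSUBROUTINE exchangeBoundary

In a real solver the request arrays would typically be sized by the number of neighbors and populated only for ranks that actually exist, since WaitForComm waits on every entry it is given.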