Skip to content
Snippets Groups Projects
Commit 86fac568 authored by William F Godoy's avatar William F Godoy
Browse files

async I/O implementation of adios Write in heat transfer examples

parent 1a91dd77
No related branches found
No related tags found
1 merge request: !198 "Async I/O implementation of adios Write in heat transfer examples"
......@@ -15,7 +15,7 @@ if(ADIOS2_HAVE_MPI)
target_include_directories(heatTransfer_write_adios2
PRIVATE ${MPI_C_INCLUDE_PATH}
)
target_link_libraries(heatTransfer_write_adios2 adios2 ${MPI_C_LIBRARIES})
target_link_libraries(heatTransfer_write_adios2 adios2 ${MPI_C_LIBRARIES} pthread)
target_compile_definitions(heatTransfer_write_adios2 PRIVATE
-DDEFAULT_CONFIG=${CMAKE_CURRENT_SOURCE_DIR}/config.xml)
......@@ -33,7 +33,7 @@ if(ADIOS2_HAVE_MPI)
PRIVATE ${MPI_C_INCLUDE_PATH}
)
target_link_libraries(heatTransfer_write_adios1
adios1::adios ${MPI_C_LIBRARIES}
adios1::adios ${MPI_C_LIBRARIES} pthread
)
endif()
......@@ -51,7 +51,7 @@ if(ADIOS2_HAVE_MPI)
PRIVATE ${MPI_C_INCLUDE_PATH} ${HDF5_C_INCLUDE_DIRS}
)
target_link_libraries(heatTransfer_write_hdf5
${MPI_C_LIBRARIES} ${HDF5_C_LIBRARIES}
${MPI_C_LIBRARIES} ${HDF5_C_LIBRARIES} pthread
)
endif()
......@@ -70,7 +70,7 @@ if(ADIOS2_HAVE_MPI)
PRIVATE ${MPI_C_INCLUDE_PATH} ${HDF5_C_INCLUDE_DIRS}
)
target_link_libraries(heatTransfer_write_ph5
${MPI_C_LIBRARIES} ${HDF5_C_LIBRARIES}
${MPI_C_LIBRARIES} ${HDF5_C_LIBRARIES} pthread
)
endif()
......@@ -91,7 +91,7 @@ if(ADIOS2_HAVE_MPI)
#target_link_libraries(heatTransfer_write_a2h5
# ${MPI_C_LIBRARIES}
#)
target_link_libraries(heatTransfer_write_a2h5 PUBLIC adios2)
target_link_libraries(heatTransfer_write_a2h5 PUBLIC adios2 pthread)
endif()
......
......@@ -49,6 +49,24 @@ Settings::Settings(int argc, char *argv[], int rank, int nproc) : rank{rank}
steps = convertToUint("steps", argv[6]);
iterations = convertToUint("iterations", argv[7]);
if (argc == 9)
{
const std::string asyncArg(argv[8]);
if (asyncArg == "ON" || asyncArg == "on")
{
async = true;
}
else if (asyncArg == "OFF" || asyncArg == "off")
{
// nothing off is default
}
else
{
throw std::invalid_argument("ERROR: wrong async argument " +
asyncArg + " must be on or off\n");
}
}
if (npx * npy != this->nproc)
{
throw std::invalid_argument("N*M must equal the number of processes");
......
......@@ -47,6 +47,9 @@ public:
int rank_up;
int rank_down;
/** true: std::async Write, false (default): sync */
bool async = false;
Settings(int argc, char *argv[], int rank, int nproc);
};
......
......@@ -12,6 +12,7 @@
*/
#include <mpi.h>
#include <future> //std::future, std::async
#include <iostream>
#include <memory>
#include <stdexcept>
......@@ -23,15 +24,16 @@
/** Prints command-line usage help for the heatTransfer write example to
 *  standard output.
 *
 *  NOTE(review): the previous revision of this message (which lacked the
 *  trailing "async" argument) was left in place alongside the new one, so the
 *  banner was emitted twice; only the current text is kept here.
 */
void printUsage()
{
    std::cout << "Usage: heatTransfer output N M nx ny steps "
                 "iterations async\n"
              << " output: name of output file\n"
              << " N: number of processes in X dimension\n"
              << " M: number of processes in Y dimension\n"
              << " nx: local array size in X dimension per processor\n"
              << " ny: local array size in Y dimension per processor\n"
              << " steps: the total number of steps to output\n"
              << " iterations: one step consist of this many iterations\n"
              << " async: on or off (default) \n\n";
}
int main(int argc, char *argv[])
......@@ -68,7 +70,18 @@ int main(int argc, char *argv[])
ht.heatEdges();
ht.exchange(mpiHeatTransferComm);
// ht.printT("Heated T:", mpiHeatTransferComm);
io.write(0, ht, settings, mpiHeatTransferComm);
std::future<void> futureWrite;
if (settings.async)
{
futureWrite =
std::async(std::launch::async, &IO::write, &io, 0, std::ref(ht),
std::ref(settings), mpiHeatTransferComm);
}
else
{
io.write(0, ht, settings, mpiHeatTransferComm);
}
for (unsigned int t = 1; t <= settings.steps; ++t)
{
......@@ -80,7 +93,22 @@ int main(int argc, char *argv[])
ht.exchange(mpiHeatTransferComm);
ht.heatEdges();
}
io.write(t, ht, settings, mpiHeatTransferComm);
if (settings.async)
{
futureWrite.get();
futureWrite = std::async(std::launch::async, &IO::write, &io, t,
std::ref(ht), std::ref(settings),
mpiHeatTransferComm);
}
else
{
io.write(t, ht, settings, mpiHeatTransferComm);
}
}
if (settings.async)
{
futureWrite.get();
}
MPI_Barrier(mpiHeatTransferComm);
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.