Skip to content

Commit 16f093f

Browse files
authored
adding configure check for MPI version (#333)
* adding configure check for MPI version * improving MPI docs * removing unnecessary check
1 parent a62295d commit 16f093f

3 files changed

Lines changed: 15 additions & 7 deletions

File tree

CMakeLists.txt

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ message(" CO_SIM_IO_BUILD_PYTHON: " ${CO_SIM_IO_BUILD_PYTHON})
4646
message(" CO_SIM_IO_STRICT_COMPILER: " ${CO_SIM_IO_STRICT_COMPILER})
4747
message("")
4848

49-
49+
# needs to be here as otherwise doesn't find MPI properly
5050
if (CO_SIM_IO_BUILD_MPI)
5151
find_package(MPI REQUIRED)
5252
add_definitions( -DCO_SIM_IO_USING_MPI )
@@ -148,12 +148,17 @@ generate_export_header( co_sim_io EXPORT_MACRO_NAME CO_SIM_IO_API EXPORT_FILE_NA
148148
install(TARGETS co_sim_io DESTINATION bin)
149149

150150
if (CO_SIM_IO_BUILD_MPI)
151-
# Adding CoSimIO library
151+
# optionally enable communication via MPI
152152
OPTION ( CO_SIM_IO_BUILD_MPI_COMMUNICATION "Enabling communication via MPI" OFF )
153153
if (CO_SIM_IO_BUILD_MPI_COMMUNICATION)
154154
add_definitions( -DCO_SIM_IO_BUILD_MPI_COMMUNICATION )
155155
message("Enabled communication via MPI")
156+
if(MPI_CXX_VERSION_MAJOR VERSION_LESS 2.0)
157+
message(WARNING "Communication via MPI requires at least MPI version 2.0! Currently detected version: " ${MPI_CXX_VERSION})
158+
endif ()
156159
endif()
160+
161+
# Adding CoSimIOMPI library
157162
file(GLOB_RECURSE co_sim_io_mpi_source_files ${CMAKE_CURRENT_SOURCE_DIR}/co_sim_io/mpi/sources/*.cpp)
158163
add_library (co_sim_io_mpi SHARED ${co_sim_io_mpi_source_files})
159164

co_sim_io/sources/communication/communication.cpp

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -441,7 +441,6 @@ Info Communication::GetMyInfo() const
441441
my_info.Set<std::string>("communication_format", GetCommunicationName());
442442
my_info.Set<std::string>("operating_system", GetOsName());
443443

444-
my_info.Set<bool>("is_distributed", GetDataCommunicator().IsDistributed());
445444
my_info.Set<int>("num_processes", GetDataCommunicator().Size());
446445

447446
my_info.Set<bool>("is_big_endian", Utilities::IsBigEndian());
@@ -502,8 +501,6 @@ void Communication::HandShake(const Info& I_Info)
502501

503502
CO_SIM_IO_ERROR_IF(GetDataCommunicator().Size() != mPartnerInfo.Get<int>("num_processes")) << "Mismatch in num_processes!\nMy num_processes: " << GetDataCommunicator().Size() << "\nPartner num_processes: " << mPartnerInfo.Get<int>("num_processes") << std::endl;
504503

505-
CO_SIM_IO_ERROR_IF(GetDataCommunicator().IsDistributed() != mPartnerInfo.Get<bool>("is_distributed")) << "Mismatch calling Connect(MPI)!\nMyself called: " << (GetDataCommunicator().IsDistributed()?"ConnectMPI":"Connect") << "\nPartner called: " << (mPartnerInfo.Get<bool>("is_distributed")?"ConnectMPI":"Connect") << std::endl;
506-
507504
CO_SIM_IO_ERROR_IF(mAlwaysUseSerializer != mPartnerInfo.Get<bool>("always_use_serializer")) << std::boolalpha << "Mismatch in always_use_serializer!\nMy always_use_serializer: " << mAlwaysUseSerializer << "\nPartner always_use_serializer: " << mPartnerInfo.Get<bool>("always_use_serializer") << std::noboolalpha << std::endl;
508505

509506
CO_SIM_IO_ERROR_IF(Serializer::TraceTypeToString(mSerializerTraceType) != mPartnerInfo.Get<std::string>("serializer_trace_type")) << "Mismatch in serializer_trace_type!\nMy serializer_trace_type: " << Serializer::TraceTypeToString(mSerializerTraceType) << "\nPartner serializer_trace_type: " << mPartnerInfo.Get<std::string>("serializer_trace_type") << std::endl;

docs/communication.md

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -145,7 +145,7 @@ Set `communication_format` to `pipe`.
145145

146146
## MPI-based communication
147147
**This form of communication is experimental**
148-
MPI is usually used to communicate between different ranks within an executable/one MPI-communicator. MPI 2.0 added functionalities with which the communication can be done also between independent communicators. This can be done similarly to the socket based communication through opening ports and accepting connection (on the primary/server side) and connecting to the opened port (on the secondary/client side). After the connection is established, communication is done with the standard MPI calls like `MPI_Send` and `MPI_Recv`. This is oftentimes the fastest way of exchanging data in a distributed memory environment.
148+
MPI is usually used to communicate between different ranks within an executable/one MPI-communicator. MPI 2.0 added functionalities with which the communication can be done also between independent communicators (i.e. if two executables were started separately with MPI as shown below). This can be done similarly to the socket based communication through opening ports and accepting connection (on the primary/server side) and connecting to the opened port (on the secondary/client side). After the connection is established, communication is done with the standard MPI calls like `MPI_Send` and `MPI_Recv`. This is oftentimes the fastest way of exchanging data in a distributed memory environment.
149149

150150
The disadvantage of this form of communication is that the features required for establishing communication across communicators are not robustly available for all MPI implementations. Experience shows that it is problematic with OpenMPI but works well with IntelMPI. Furthermore it might be required to use the same compilers and MPI implementation for successfully connecting.
151151

@@ -155,7 +155,13 @@ This form of communication is based on MPI and is hence only available if a conn
155155

156156
The two executables are expected to be started with separate MPI calls:
157157
~~~
158-
mpiexec -np 4 ./executable_1 & mpiexec -np 4 ./executable_2
158+
mpiexec -np 4 ./executable_1 & mpiexec -np 4 ./executable_2
159+
~~~
160+
161+
OpenMPI works only with very recent versions (4.1) and requires additionally to start an `ompi-server`:
162+
~~~
163+
ompi-server -r server.txt
164+
mpiexec --ompi-server file:server.txt -np 4 ./executable_1 & mpiexec --ompi-server file:server.txt -np 4 ./executable_2
159165
~~~
160166

161167
The implementation of the _MPIInterCommunication_ can be found [here](https://github.com/KratosMultiphysics/CoSimIO/blob/master/co_sim_io/mpi/includes/communication/mpi_inter_communication.hpp).

0 commit comments

Comments
 (0)