#define TRACE_NAME "MPISentry"
#include "artdaq/DAQdata/Globals.hh"
#include "artdaq-mpich-plugin/Application/MPISentry.hh"
#include "artdaq-mpich-plugin/Utilities/quiet_mpi.hh"
#include "cetlib_except/exception.h"

#include <sstream>

artdaq::MPISentry::
MPISentry(int* argc_ptr, char*** argv_ptr)
	: threading_level_(0)
	, rank_(-1)
	, procs_(0)
{
	MPI_Init(argc_ptr, argv_ptr);
	initialize_();
}

artdaq::MPISentry::
MPISentry(int* argc_ptr,
          char*** argv_ptr,
          int threading_level)
	: threading_level_(0)
	, rank_(-1)
	, procs_(0)
{
	MPI_Init_thread(argc_ptr, argv_ptr, threading_level, &threading_level_);
	initialize_();

	std::ostringstream threadresult;
	threadresult << "MPI initialized with requested thread support level of "
	             << threading_level << ", actual support level = "
	             << threading_level_ << ".";

	TLOG(TLVL_DEBUG) << threadresult.str();

	if (threading_level != threading_level_) throw cet::exception("MPISentry") << threadresult.str();

	TLOG(TLVL_DEBUG) << "size = " << procs_ << ", rank = " << rank_;
}

artdaq::MPISentry::
MPISentry(int* argc_ptr,
          char*** argv_ptr,
          int threading_level,
          artdaq::TaskType type,
          MPI_Comm& local_group_comm)
	: threading_level_(0)
	, rank_(-1)
	, procs_(0)
{
	MPI_Init_thread(argc_ptr, argv_ptr, threading_level, &threading_level_);
	initialize_();

	std::ostringstream threadresult;
	threadresult << "MPI initialized with requested thread support level of "
	             << threading_level << ", actual support level = "
	             << threading_level_ << ".";

	TLOG(TLVL_DEBUG) << threadresult.str();

	if (threading_level != threading_level_) throw cet::exception("MPISentry") << threadresult.str();

	TLOG(TLVL_DEBUG) << "size = " << procs_ << ", rank = " << rank_;

	int status = MPI_Comm_split(MPI_COMM_WORLD, type, 0, &local_group_comm);

	if (status == MPI_SUCCESS)
	{
		int temp_rank;
		MPI_Comm_rank(local_group_comm, &temp_rank);

		TLOG(TLVL_DEBUG) << "Successfully created local communicator for type "
		                 << type << ", identifier = 0x"
		                 << std::hex << local_group_comm << std::dec
		                 << ", rank = " << temp_rank << ".";
	}
	else
	{
		std::ostringstream groupcommresult;
		groupcommresult << "Failed to create the local MPI communicator group for "
		                << "task type #" << type << ", status code = " << status << ".";
		TLOG(TLVL_ERROR) << groupcommresult.str();
		throw cet::exception("MPISentry") << groupcommresult.str();
	}
}

artdaq::MPISentry::
~MPISentry()
{
	MPI_Finalize();
}

int
artdaq::MPISentry::
threading_level() const
{
	return threading_level_;
}

int
artdaq::MPISentry::
rank() const
{
	return rank_;
}

int
artdaq::MPISentry::
procs() const
{
	return procs_;
}

void
artdaq::MPISentry::
initialize_()
{
	MPI_Comm_size(MPI_COMM_WORLD, &procs_);
	MPI_Comm_rank(MPI_COMM_WORLD, &rank_);
	my_rank = rank_;
}
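
// ---------------------------------------------------------------------------
// Illustrative usage sketch (an editorial assumption, not part of this file):
// MPISentry is an RAII guard, so constructing it at the top of main() ties
// MPI_Init / MPI_Init_thread to the guard's lifetime and MPI_Finalize to
// scope exit. MPI_THREAD_FUNNELED is a standard MPI constant; the TaskType
// value below is a hypothetical placeholder for whatever task type the
// calling process represents. Kept under #if 0 so it does not compile into
// this translation unit.
#if 0
#include "artdaq-mpich-plugin/Application/MPISentry.hh"

int main(int argc, char* argv[])
{
	MPI_Comm local_group_comm;  // filled in by MPI_Comm_split inside the sentry

	// Throws cet::exception if MPI grants a thread support level other than
	// the one requested, or if the communicator split fails.
	artdaq::MPISentry sentry(&argc, &argv, MPI_THREAD_FUNNELED,
	                         artdaq::TaskType(1),  // hypothetical task type
	                         local_group_comm);

	// ... application work using MPI_COMM_WORLD and local_group_comm ...

	return 0;  // ~MPISentry runs MPI_Finalize here
}
#endif
// ---------------------------------------------------------------------------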