#include "artdaq/DAQdata/Globals.hh"
#include "artdaq/Application/MPI2/MPISentry.hh"
#include "artdaq/DAQrate/quiet_mpi.hh"
#include "cetlib_except/exception.h"

#include &lt;sstream&gt;

// Initialize MPI without requesting a particular thread-support level.
artdaq::MPISentry::
MPISentry(int* argc_ptr, char*** argv_ptr)
	:
	threading_level_(0)
	, rank_(-1)
	, procs_(0)
{
	MPI_Init(argc_ptr, argv_ptr);
	initialize_();
}

// Initialize MPI with a requested thread-support level; throw if the level
// actually provided by the MPI implementation differs from the request.
artdaq::MPISentry::
MPISentry(int* argc_ptr,
          char*** argv_ptr,
          int threading_level)
	:
	threading_level_(0)
	, rank_(-1)
	, procs_(0)
{
	MPI_Init_thread(argc_ptr, argv_ptr, threading_level, &threading_level_);
	initialize_();

	std::ostringstream threadresult;
	threadresult << "MPI initialized with requested thread support level of "
		<< threading_level << ", actual support level = "
		<< threading_level_ << ".";

	TLOG_DEBUG("MPISentry") << threadresult.str() << TLOG_ENDL;

	if (threading_level != threading_level_) throw cet::exception("MPISentry") << threadresult.str();

	TLOG_DEBUG("MPISentry")
		<< "size = "
		<< procs_
		<< ", rank = "
		<< rank_ << TLOG_ENDL;
}

// As above, but additionally split MPI_COMM_WORLD into a local communicator
// shared by processes of the same task type.
artdaq::MPISentry::
MPISentry(int* argc_ptr,
          char*** argv_ptr,
          int threading_level, artdaq::TaskType type, MPI_Comm& local_group_comm)
	:
	threading_level_(0)
	, rank_(-1)
	, procs_(0)
{
	MPI_Init_thread(argc_ptr, argv_ptr, threading_level, &threading_level_);
	initialize_();

	std::ostringstream threadresult;
	threadresult << "MPI initialized with requested thread support level of "
		<< threading_level << ", actual support level = "
		<< threading_level_ << ".";

	TLOG_DEBUG("MPISentry") << threadresult.str() << TLOG_ENDL;

	if (threading_level != threading_level_) throw cet::exception("MPISentry") << threadresult.str();

	TLOG_DEBUG("MPISentry")
		<< "size = "
		<< procs_
		<< ", rank = "
		<< rank_ << TLOG_ENDL;

	std::ostringstream groupcommresult;

	int status = MPI_Comm_split(MPI_COMM_WORLD, type, 0, &local_group_comm);

	if (status == MPI_SUCCESS)
	{
		int temp_rank;
		MPI_Comm_rank(local_group_comm, &temp_rank);

		groupcommresult << "Successfully created local communicator for type "
			<< type << ", identifier = 0x"
			<< std::hex << local_group_comm << std::dec
			<< ", rank = " << temp_rank << ".";

		TLOG_DEBUG("MPISentry") << groupcommresult.str() << TLOG_ENDL;
	}
	else
	{
		groupcommresult << "Failed to create the local MPI communicator group for "
			<< "task type #" << type << ", status code = " << status << ".";
		throw cet::exception("MPISentry") << groupcommresult.str();
	}
}

// Finalize MPI when the sentry goes out of scope.
artdaq::MPISentry::
~MPISentry()
{
	MPI_Finalize();
}

int
artdaq::MPISentry::
threading_level() const
{
	return threading_level_;
}

int
artdaq::MPISentry::
rank() const
{
	return rank_;
}

int
artdaq::MPISentry::
procs() const
{
	return procs_;
}

// Cache the size and rank of MPI_COMM_WORLD and publish the rank through the
// global my_rank (declared in Globals.hh).
void
artdaq::MPISentry::
initialize_()
{
	MPI_Comm_size(MPI_COMM_WORLD, &procs_);
	MPI_Comm_rank(MPI_COMM_WORLD, &rank_);
	my_rank = rank_;
}
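
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of MPISentry.cc): MPISentry is an
// RAII guard that ties MPI_Init/MPI_Init_thread to construction and
// MPI_Finalize to destruction. The choice of MPI_THREAD_FUNNELED and the use
// of <mpi.h> for the thread-level constant are assumptions for this example.
#include "artdaq/Application/MPI2/MPISentry.hh"
#include &lt;mpi.h&gt;  // for MPI_THREAD_FUNNELED

int main(int argc, char* argv[])
{
	// MPI_Init_thread runs here; the constructor throws a cet::exception if
	// the MPI implementation provides a different thread-support level than
	// the one requested.
	artdaq::MPISentry sentry(&argc, &argv, MPI_THREAD_FUNNELED);

	// rank() and procs() report this process's rank and the size of
	// MPI_COMM_WORLD, as cached by initialize_().
	if (sentry.rank() == 0)
	{
		// ... work performed only on rank 0 ...
	}

	return 0;
	// MPI_Finalize is called automatically when 'sentry' leaves scope.
}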