2 #define TRACE_NAME "HighFiveNtupleDataset"
4 #include "artdaq-core/Data/ContainerFragment.hh"
5 #include "artdaq-demo-hdf5/HDF5/highFive/highFiveNtupleDataset.hh"
// HighFiveNtupleDataset constructor (extracted fragment — the signature,
// per the declaration shown below in this scrape, is
// HighFiveNtupleDataset(fhicl::ParameterSet const& ps); the leading integers
// on lines here are the ORIGINAL file's line numbers left behind by the
// extraction tool, not code).
//
// Reads configuration from the ParameterSet, then either:
//   * Read mode:  opens the named HDF5 file read-only and caches
//                 HighFiveDatasetHelper handles for every pre-existing
//                 Fragment/EventHeader dataset, or
//   * otherwise:  creates/truncates the output file and defines the
//                 ntuple-style datasets.
//
// nWordsPerRow_ ("nWordsPerRow", default 10240): number of
// artdaq::RawDataType words stored per row of the "payload" dataset;
// Fragments larger than this are split across multiple rows (see insertOne).
12 , nWordsPerRow_(ps.get<size_t>(
"nWordsPerRow", 10240))
13 , fragment_datasets_()
16 TLOG(TLVL_DEBUG) <<
"HighFiveNtupleDataset Constructor BEGIN";
// "payloadChunkSize" (default 128): HDF5 chunk height, in rows, for the
// payload dataset.
17 auto payloadChunkSize = ps.get<
size_t>(
"payloadChunkSize", 128);
// Chunk-cache tuning for the payload dataset.
// HighFive::Caching(nslots, nbytes, w0): 12421 hash-table slots; cache size
// defaults to 10 chunks' worth of payload words ("chunkCacheSizeBytes");
// 0.5 is the chunk-preemption policy (w0).
// NOTE(review): 12421 is a magic slot count — presumably a prime chosen for
// the hash table; confirm against HDF5 H5Pset_chunk_cache guidance.
18 HighFive::DataSetAccessProps payloadAccessProps;
19 payloadAccessProps.add(HighFive::Caching(12421, ps.get<
size_t>(
"chunkCacheSizeBytes",
sizeof(artdaq::RawDataType) * payloadChunkSize * nWordsPerRow_ * 10), 0.5));
// ---- Read mode: open existing file and look up dataset handles ----
21 if (
mode_ == FragmentDatasetMode::Read)
23 TLOG(TLVL_TRACE) <<
"HighFiveNtupleDataset: Opening input file and getting Dataset pointers";
24 file_.reset(
new HighFive::File(ps.get<std::string>(
"fileName"), HighFive::File::ReadOnly));
// Per-Fragment columns live under the "/Fragments" group; one helper is
// cached per dataset, keyed by the dataset name.
26 auto fragmentGroup = file_->getGroup(
"/Fragments");
27 fragment_datasets_[
"sequenceID"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet(
"sequenceID"));
28 fragment_datasets_[
"fragmentID"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet(
"fragmentID"));
29 fragment_datasets_[
"timestamp"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet(
"timestamp"));
30 fragment_datasets_[
"type"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet(
"type"));
31 fragment_datasets_[
"size"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet(
"size"));
32 fragment_datasets_[
"index"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet(
"index"));
// Only "payload" gets the tuned chunk-cache access properties.
33 fragment_datasets_[
"payload"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet(
"payload", payloadAccessProps));
// Per-event columns live under the "/EventHeaders" group.
34 auto headerGroup = file_->getGroup(
"/EventHeaders");
35 event_datasets_[
"run_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet(
"run_id"));
36 event_datasets_[
"subrun_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet(
"subrun_id"));
37 event_datasets_[
"event_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet(
"event_id"));
38 event_datasets_[
"sequenceID"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet(
"sequenceID"));
39 event_datasets_[
"is_complete"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet(
"is_complete"));
// ---- Write mode (else branch — the `else` itself was lost in extraction):
// create the output file and define all datasets ----
43 TLOG(TLVL_TRACE) <<
"HighFiveNtupleDataset: Creating output file";
// Truncate combined with OpenOrCreate: an existing file is overwritten.
44 file_.reset(
new HighFive::File(ps.get<std::string>(
"fileName"), HighFive::File::OpenOrCreate | HighFive::File::Truncate));
// Scalar columns: chunked 128 rows x 1 column; payload: chunked
// payloadChunkSize rows x nWordsPerRow_ columns.
46 HighFive::DataSetCreateProps scalar_props;
47 scalar_props.add(HighFive::Chunking(std::vector<hsize_t>{128, 1}));
48 HighFive::DataSetCreateProps vector_props;
49 vector_props.add(HighFive::Chunking(std::vector<hsize_t>{payloadChunkSize, nWordsPerRow_}));
// Both dataspaces start with 0 rows and are extendable without bound in the
// row dimension only.
51 HighFive::DataSpace scalarSpace = HighFive::DataSpace({0, 1}, {HighFive::DataSpace::UNLIMITED, 1});
52 HighFive::DataSpace vectorSpace = HighFive::DataSpace({0, nWordsPerRow_}, {HighFive::DataSpace::UNLIMITED, nWordsPerRow_});
54 TLOG(TLVL_TRACE) <<
"HighFiveNtupleDataset: Creating Fragment datasets";
55 auto fragmentGroup = file_->createGroup(
"/Fragments");
// Column element types mirror the artdaq Fragment header fields
// (e.g. 64-bit sequence ID, 16-bit fragment ID, 8-bit type).
56 fragment_datasets_[
"sequenceID"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint64_t>(
"sequenceID", scalarSpace, scalar_props));
57 fragment_datasets_[
"fragmentID"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint16_t>(
"fragmentID", scalarSpace, scalar_props));
58 fragment_datasets_[
"timestamp"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint64_t>(
"timestamp", scalarSpace, scalar_props));
59 fragment_datasets_[
"type"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint8_t>(
"type", scalarSpace, scalar_props));
60 fragment_datasets_[
"size"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint64_t>(
"size", scalarSpace, scalar_props));
61 fragment_datasets_[
"index"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint64_t>(
"index", scalarSpace, scalar_props));
// The payload helper also records the chunk height (payloadChunkSize) —
// presumably used by the helper for buffered/strided writes; confirm in
// HighFiveDatasetHelper.
62 fragment_datasets_[
"payload"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<artdaq::RawDataType>(
"payload", vectorSpace, vector_props, payloadAccessProps), payloadChunkSize);
64 TLOG(TLVL_TRACE) <<
"HighFiveNtupleDataset: Creating EventHeader datasets";
65 auto headerGroup = file_->createGroup(
"/EventHeaders");
66 event_datasets_[
"run_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint32_t>(
"run_id", scalarSpace, scalar_props));
67 event_datasets_[
"subrun_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint32_t>(
"subrun_id", scalarSpace, scalar_props));
68 event_datasets_[
"event_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint32_t>(
"event_id", scalarSpace, scalar_props));
69 event_datasets_[
"sequenceID"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint64_t>(
"sequenceID", scalarSpace, scalar_props));
70 event_datasets_[
"is_complete"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint8_t>(
"is_complete", scalarSpace, scalar_props));
72 TLOG(TLVL_DEBUG) <<
"HighFiveNtupleDataset Constructor END";
// Destructor fragment: only a trace message is visible ("BEGIN/END" in one
// line suggests the body does nothing else); file_ and the dataset-helper
// maps are presumably released by member destructors (RAII) — confirm in
// the full source.
77 TLOG(TLVL_DEBUG) <<
"~HighFiveNtupleDataset BEGIN/END";
// insertOne(Fragment const& frag): write one artdaq Fragment into the
// /Fragments datasets. The Fragment's raw words (header included — writes
// start at frag.headerBegin()) are split into ceil(fragSize / nWordsPerRow_)
// payload rows; the per-Fragment metadata (sequenceID, fragmentID,
// timestamp, type, size) is duplicated on every row, and "index" records
// each row's word offset so readNextEvent can reassemble the Fragment.
83 TLOG(TLVL_TRACE) <<
"insertOne BEGIN";
84 auto fragSize = frag.size();
// rows = ceil(fragSize / nWordsPerRow_), computed as floor(quotient) plus
// one extra row when there is a remainder.
85 auto rows =
static_cast<size_t>(floor(fragSize / static_cast<double>(nWordsPerRow_))) + (fragSize % nWordsPerRow_ == 0 ? 0 : 1);
86 TLOG(5) <<
"Fragment size: " << fragSize <<
", rows: " << rows <<
" (nWordsPerRow: " << nWordsPerRow_ <<
")";
// Debug dump of the first five raw words (Fragment header words).
88 TLOG(6) <<
"First words of Fragment: 0x" << std::hex << *frag.headerBegin() <<
" 0x" << std::hex << *(frag.headerBegin() + 1) <<
" 0x" << std::hex << *(frag.headerBegin() + 2) <<
" 0x" << std::hex << *(frag.headerBegin() + 3) <<
" 0x" << std::hex << *(frag.headerBegin() + 4);
// Hoist the header fields once; they are re-written on every row below.
90 auto seqID = frag.sequenceID();
91 auto fragID = frag.fragmentID();
92 auto timestamp = frag.timestamp();
93 auto type = frag.type();
95 for (
size_t ii = 0; ii < rows; ++ii)
97 TLOG(7) <<
"Writing Fragment fields to datasets";
98 fragment_datasets_[
"sequenceID"]->write(seqID);
99 fragment_datasets_[
"fragmentID"]->write(fragID);
100 fragment_datasets_[
"timestamp"]->write(timestamp);
101 fragment_datasets_[
"type"]->write(type);
102 fragment_datasets_[
"size"]->write(fragSize);
// "index": word offset of this row within the Fragment.
103 fragment_datasets_[
"index"]->write(ii * nWordsPerRow_);
// Last row may be partial: write only the remaining words.
105 auto wordsThisRow = (ii + 1) * nWordsPerRow_ <= fragSize ? nWordsPerRow_ : fragSize - (ii * nWordsPerRow_);
106 fragment_datasets_[
"payload"]->write(frag.headerBegin() + (ii * nWordsPerRow_), wordsThisRow);
108 TLOG(TLVL_TRACE) <<
"insertOne END";
// insertHeader(detail::RawEventHeader const& hdr): append one row to each
// /EventHeaders dataset (run_id, subrun_id, event_id, sequenceID,
// is_complete). One event header == one row in every scalar dataset; the
// row index therefore pairs the fields of a given event across datasets.
113 TLOG(TLVL_TRACE) <<
"insertHeader BEGIN";
114 event_datasets_[
"run_id"]->write(hdr.run_id);
115 event_datasets_[
"subrun_id"]->write(hdr.subrun_id);
116 event_datasets_[
"event_id"]->write(hdr.event_id);
// Note the naming mismatch: struct field is sequence_id, dataset is
// "sequenceID".
117 event_datasets_[
"sequenceID"]->write(hdr.sequence_id);
118 event_datasets_[
"is_complete"]->write(hdr.is_complete);
120 TLOG(TLVL_TRACE) <<
"insertHeader END";
// readNextEvent(): read all Fragment rows sharing the next sequence ID
// (starting at fragmentIndex_) and reassemble them into complete
// artdaq::Fragments, grouped by Fragment type in the returned map.
//
// NOTE(review): this is an extracted fragment — the original file's line
// numbers jump (e.g. 141 -> 147, 166 readOne without a visible
// fragmentIndex_ increment, no visible `break` after the "leaving read
// loop" message), so the loop-control statements were dropped by the
// extraction tool; consult the full source before reasoning about control
// flow here.
125 TLOG(TLVL_TRACE) <<
"readNextEvent START fragmentIndex_ " << fragmentIndex_;
126 std::unordered_map<artdaq::Fragment::type_t, std::unique_ptr<artdaq::Fragments>> output;
128 auto numFragments = fragment_datasets_[
"sequenceID"]->getDatasetSize();
// 0 acts as the "not yet latched" sentinel for the event's sequence ID.
129 artdaq::Fragment::sequence_id_t currentSeqID = 0;
131 while (fragmentIndex_ < numFragments)
133 TLOG(8) <<
"readNextEvent: Testing Fragment " << fragmentIndex_ <<
" / " << numFragments <<
" to see if it belongs in this event";
// Latch the sequence ID of the first unread Fragment row.
134 if (currentSeqID == 0)
136 currentSeqID = fragment_datasets_[
"sequenceID"]->readOne<uint64_t>(fragmentIndex_);
137 TLOG(8) <<
"readNextEvent: Setting current Sequence ID to " << currentSeqID;
// A different sequence ID marks the start of the next event: stop here.
140 auto sequence_id = fragment_datasets_[
"sequenceID"]->readOne<uint64_t>(fragmentIndex_);
141 if (sequence_id != currentSeqID)
143 TLOG(8) <<
"readNextEvent: Current sequence ID is " << currentSeqID <<
", next Fragment sequence ID is " << sequence_id <<
", leaving read loop";
147 auto payloadRowSize = fragment_datasets_[
"payload"]->getRowSize();
// Per-row metadata: type, total Fragment size in words, and this row's
// word offset within the Fragment.
149 auto type = fragment_datasets_[
"type"]->readOne<uint8_t>(fragmentIndex_);
150 auto size_words = fragment_datasets_[
"size"]->readOne<uint64_t>(fragmentIndex_);
151 auto index = fragment_datasets_[
"index"]->readOne<uint64_t>(fragmentIndex_);
// Fragment ctor takes the PAYLOAD size; subtract the header words already
// counted in size_words.
152 artdaq::Fragment frag(size_words - artdaq::detail::RawFragmentHeader::num_words());
154 TLOG(8) <<
"readNextEvent: Fragment has size " << size_words <<
", payloadRowSize is " << payloadRowSize;
// First row: copy min(size_words, payloadRowSize) words to the start of
// the Fragment buffer (header included).
155 auto thisRowSize = size_words > payloadRowSize ? payloadRowSize : size_words;
156 auto payloadvec = fragment_datasets_[
"payload"]->read<artdaq::RawDataType>(fragmentIndex_);
157 memcpy(frag.headerBegin(), &(payloadvec[0]), thisRowSize *
sizeof(artdaq::RawDataType));
159 TLOG(8) <<
"readNextEvent: First words of Payload: 0x" << std::hex << payloadvec[0] <<
" 0x" << std::hex << payloadvec[1] <<
" 0x" << std::hex << payloadvec[2] <<
" 0x" << std::hex << payloadvec[3] <<
" 0x" << std::hex << payloadvec[4];
160 TLOG(8) <<
"readNextEvent: First words of Fragment: 0x" << std::hex << *frag.headerBegin() <<
" 0x" << std::hex << *(frag.headerBegin() + 1) <<
" 0x" << std::hex << *(frag.headerBegin() + 2) <<
" 0x" << std::hex << *(frag.headerBegin() + 3) <<
" 0x" << std::hex << *(frag.headerBegin() + 4);
// Continuation rows: while the last-copied row did not reach the end of
// the Fragment, read the next row's index/payload and copy it at its
// recorded word offset (last row may be partial).
162 while (index + payloadRowSize < size_words)
164 TLOG(8) <<
"readNextEvent: Retrieving additional payload row, index of previous row " << index <<
", payloadRowSize " << payloadRowSize <<
", fragment size words " << size_words;
166 index = fragment_datasets_[
"index"]->readOne<
size_t>(fragmentIndex_);
167 payloadvec = fragment_datasets_[
"payload"]->read<artdaq::RawDataType>(fragmentIndex_);
169 auto thisRowSize = index + payloadRowSize < size_words ? payloadRowSize : size_words - index;
170 memcpy(frag.headerBegin() + index, &(payloadvec[0]), thisRowSize *
sizeof(artdaq::RawDataType));
// Group the completed Fragment into the output map by its type, creating
// the Fragments vector on first use.
173 if (!output.count(type))
175 output[type] = std::make_unique<artdaq::Fragments>();
177 TLOG(8) <<
"readNextEvent: Adding Fragment to event map; type=" << type <<
", frag size " << frag.size();
// push_back copies frag into the vector.
178 output[type]->push_back(frag);
183 TLOG(TLVL_TRACE) <<
"readNextEvent END output.size() = " << output.size();
// getEventHeader(seqID): linearly scan the /EventHeaders "sequenceID"
// dataset from headerIndex_ looking for a row whose sequence ID equals
// seqID; return nullptr if the dataset is exhausted first, otherwise build
// a RawEventHeader from that row's fields.
// NOTE(review): the headerIndex_ advance inside the scan loop is not
// visible in this extracted fragment (original lines 198-202 are missing);
// confirm against the full source.
189 TLOG(TLVL_TRACE) <<
"getEventHeader BEGIN";
190 artdaq::Fragment::sequence_id_t sequence_id = 0;
191 auto numHeaders = event_datasets_[
"sequenceID"]->getDatasetSize();
193 TLOG(9) <<
"getEventHeader: Searching for matching header row";
194 while (sequence_id != seqID && headerIndex_ < numHeaders)
196 sequence_id = event_datasets_[
"sequenceID"]->readOne<uint64_t>(headerIndex_);
197 if (sequence_id != seqID)
// Exhausted the dataset without a match: no header for this sequence ID.
203 if (headerIndex_ >= numHeaders)
return nullptr;
205 TLOG(9) <<
"getEventHeader: Matching header found. Populating output";
206 auto runID = event_datasets_[
"run_id"]->readOne<uint32_t>(headerIndex_);
207 auto subrunID = event_datasets_[
"subrun_id"]->readOne<uint32_t>(headerIndex_);
208 auto eventID = event_datasets_[
"event_id"]->readOne<uint32_t>(headerIndex_);
210 artdaq::detail::RawEventHeader hdr(runID, subrunID, eventID, sequence_id);
211 hdr.is_complete = event_datasets_[
"is_complete"]->readOne<uint8_t>(headerIndex_);
213 TLOG(TLVL_TRACE) <<
"getEventHeader END";
// Returned by value into a fresh unique_ptr (hdr is copied).
214 return std::make_unique<artdaq::detail::RawEventHeader>(hdr);
std::unique_ptr< artdaq::detail::RawEventHeader > getEventHeader(artdaq::Fragment::sequence_id_t const &) override
Read a RawEventHeader from the Dataset (HDF5 file)
void insertHeader(detail::RawEventHeader const &hdr) override
Insert a RawEventHeader into the Dataset (write it to the HDF5 file)
virtual ~HighFiveNtupleDataset() noexcept
HighFiveNtupleDataset Destructor.
std::unordered_map< artdaq::Fragment::type_t, std::unique_ptr< artdaq::Fragments > > readNextEvent() override
Read the next event from the Dataset (HDF5 file)
An implementation of FragmentDataset using the HighFive backend to produce files identical to those p...
void insertOne(Fragment const &frag) override
Insert a Fragment into the Dataset (write it to the HDF5 file)
FragmentDatasetMode mode_
Mode of this FragmentDataset, either FragmentDatasetMode::Write or FragmentDatasetMode::Read.
Base class that defines methods for reading and writing to HDF5 files via various implementation plug...
HighFiveNtupleDataset(fhicl::ParameterSet const &ps)
HighFiveNtupleDataset Constructor.