#define TRACE_NAME "HighFiveNtupleDataset"

#include "artdaq-core/Data/ContainerFragment.hh"
#include "artdaq-demo-hdf5/HDF5/highFive/highFiveNtupleDataset.hh"
artdaq::hdf5::HighFiveNtupleDataset::HighFiveNtupleDataset(fhicl::ParameterSet const& ps)
	: nWordsPerRow_(ps.get<size_t>("nWordsPerRow", 10240))  // other member initializers not shown in this listing
{
	TLOG(TLVL_DEBUG) << "HighFiveNtupleDataset Constructor BEGIN";
	auto payloadChunkSize = ps.get<size_t>("payloadChunkSize", 128);
	HighFive::DataSetAccessProps payloadAccessProps;
	payloadAccessProps.add(HighFive::Caching(12421, ps.get<size_t>("chunkCacheSizeBytes", sizeof(artdaq::RawDataType) * payloadChunkSize * nWordsPerRow_ * 10), 0.5));
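	// Editorial note (not in the original source): with the defaults above, and assuming
	// sizeof(artdaq::RawDataType) == 8, the default chunk cache comes out to
	// 8 * 128 * 10240 * 10 = 104,857,600 bytes (100 MiB), i.e. room for ten payload chunks
	// of payloadChunkSize rows by nWordsPerRow_ words each.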
	if (mode_ == FragmentDatasetMode::Read)
	{
		TLOG(TLVL_TRACE) << "HighFiveNtupleDataset: Opening input file and getting Dataset pointers";
		file_ = std::make_unique<HighFive::File>(ps.get<std::string>("fileName"), HighFive::File::ReadOnly);

		auto fragmentGroup = file_->getGroup("/Fragments");
		fragment_datasets_["sequenceID"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet("sequenceID"));
		fragment_datasets_["fragmentID"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet("fragmentID"));
		fragment_datasets_["timestamp"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet("timestamp"));
		fragment_datasets_["type"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet("type"));
		fragment_datasets_["size"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet("size"));
		fragment_datasets_["index"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet("index"));
		fragment_datasets_["payload"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.getDataSet("payload", payloadAccessProps));

		auto headerGroup = file_->getGroup("/EventHeaders");
		event_datasets_["run_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet("run_id"));
		event_datasets_["subrun_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet("subrun_id"));
		event_datasets_["event_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet("event_id"));
		event_datasets_["sequenceID"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet("sequenceID"));
		event_datasets_["timestamp"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet("timestamp"));
		event_datasets_["is_complete"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.getDataSet("is_complete"));
	}
	else
	{
		TLOG(TLVL_TRACE) << "HighFiveNtupleDataset: Creating output file";
		file_ = std::make_unique<HighFive::File>(ps.get<std::string>("fileName"), HighFive::File::OpenOrCreate | HighFive::File::Truncate);

		HighFive::DataSetCreateProps scalar_props;
		scalar_props.add(HighFive::Chunking(std::vector<hsize_t>{128, 1}));
		HighFive::DataSetCreateProps vector_props;
		vector_props.add(HighFive::Chunking(std::vector<hsize_t>{payloadChunkSize, nWordsPerRow_}));

		HighFive::DataSpace scalarSpace = HighFive::DataSpace({0, 1}, {HighFive::DataSpace::UNLIMITED, 1});
		HighFive::DataSpace vectorSpace = HighFive::DataSpace({0, nWordsPerRow_}, {HighFive::DataSpace::UNLIMITED, nWordsPerRow_});

		TLOG(TLVL_TRACE) << "HighFiveNtupleDataset: Creating Fragment datasets";
		auto fragmentGroup = file_->createGroup("/Fragments");
		fragment_datasets_["sequenceID"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint64_t>("sequenceID", scalarSpace, scalar_props));
		fragment_datasets_["fragmentID"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint16_t>("fragmentID", scalarSpace, scalar_props));
		fragment_datasets_["timestamp"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint64_t>("timestamp", scalarSpace, scalar_props));
		fragment_datasets_["type"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint8_t>("type", scalarSpace, scalar_props));
		fragment_datasets_["size"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint64_t>("size", scalarSpace, scalar_props));
		fragment_datasets_["index"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<uint64_t>("index", scalarSpace, scalar_props));
		fragment_datasets_["payload"] = std::make_unique<HighFiveDatasetHelper>(fragmentGroup.createDataSet<artdaq::RawDataType>("payload", vectorSpace, vector_props, payloadAccessProps), payloadChunkSize);

		TLOG(TLVL_TRACE) << "HighFiveNtupleDataset: Creating EventHeader datasets";
		auto headerGroup = file_->createGroup("/EventHeaders");
		event_datasets_["run_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint32_t>("run_id", scalarSpace, scalar_props));
		event_datasets_["subrun_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint32_t>("subrun_id", scalarSpace, scalar_props));
		event_datasets_["event_id"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint32_t>("event_id", scalarSpace, scalar_props));
		event_datasets_["sequenceID"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint64_t>("sequenceID", scalarSpace, scalar_props));
		event_datasets_["timestamp"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint64_t>("timestamp", scalarSpace, scalar_props));
		event_datasets_["is_complete"] = std::make_unique<HighFiveDatasetHelper>(headerGroup.createDataSet<uint8_t>("is_complete", scalarSpace, scalar_props));
	}
	TLOG(TLVL_DEBUG) << "HighFiveNtupleDataset Constructor END";
}
artdaq::hdf5::HighFiveNtupleDataset::~HighFiveNtupleDataset() noexcept
{
	TLOG(TLVL_DEBUG) << "~HighFiveNtupleDataset BEGIN/END";
}
void artdaq::hdf5::HighFiveNtupleDataset::insertOne(artdaq::Fragment const& frag)
{
	TLOG(TLVL_TRACE) << "insertOne BEGIN";
	auto fragSize = frag.size();
	auto rows = static_cast<size_t>(floor(fragSize / static_cast<double>(nWordsPerRow_))) + (fragSize % nWordsPerRow_ == 0 ? 0 : 1);
	TLOG(5) << "Fragment size: " << fragSize << ", rows: " << rows << " (nWordsPerRow: " << nWordsPerRow_ << ")";

	TLOG(6) << "First words of Fragment: 0x" << std::hex << *frag.headerBegin() << " 0x" << std::hex << *(frag.headerBegin() + 1) << " 0x" << std::hex << *(frag.headerBegin() + 2) << " 0x" << std::hex << *(frag.headerBegin() + 3) << " 0x" << std::hex << *(frag.headerBegin() + 4);

	auto seqID = frag.sequenceID();
	auto fragID = frag.fragmentID();
	auto timestamp = frag.timestamp();
	auto type = frag.type();

	for (size_t ii = 0; ii < rows; ++ii)
	{
		TLOG(7) << "Writing Fragment fields to datasets";
		fragment_datasets_["sequenceID"]->write(seqID);
		fragment_datasets_["fragmentID"]->write(fragID);
		fragment_datasets_["timestamp"]->write(timestamp);
		fragment_datasets_["type"]->write(type);
		fragment_datasets_["size"]->write(fragSize);
		fragment_datasets_["index"]->write(ii * nWordsPerRow_);

		auto wordsThisRow = (ii + 1) * nWordsPerRow_ <= fragSize ? nWordsPerRow_ : fragSize - (ii * nWordsPerRow_);
		fragment_datasets_["payload"]->write(frag.headerBegin() + (ii * nWordsPerRow_), wordsThisRow);
	}
	TLOG(TLVL_TRACE) << "insertOne END";
}
void artdaq::hdf5::HighFiveNtupleDataset::insertHeader(artdaq::detail::RawEventHeader const& hdr)
{
	TLOG(TLVL_TRACE) << "insertHeader BEGIN";
	event_datasets_["run_id"]->write(hdr.run_id);
	event_datasets_["subrun_id"]->write(hdr.subrun_id);
	event_datasets_["event_id"]->write(hdr.event_id);
	event_datasets_["sequenceID"]->write(hdr.sequence_id);
	event_datasets_["timestamp"]->write(hdr.timestamp);
	event_datasets_["is_complete"]->write(hdr.is_complete);

	TLOG(TLVL_TRACE) << "insertHeader END";
}
std::unordered_map<artdaq::Fragment::type_t, std::unique_ptr<artdaq::Fragments>> artdaq::hdf5::HighFiveNtupleDataset::readNextEvent()
{
	TLOG(TLVL_TRACE) << "readNextEvent START fragmentIndex_ " << fragmentIndex_;
	std::unordered_map<artdaq::Fragment::type_t, std::unique_ptr<artdaq::Fragments>> output;

	auto numFragments = fragment_datasets_["sequenceID"]->getDatasetSize();
	artdaq::Fragment::sequence_id_t currentSeqID = 0;

	while (fragmentIndex_ < numFragments)
	{
		TLOG(8) << "readNextEvent: Testing Fragment " << fragmentIndex_ << " / " << numFragments << " to see if it belongs in this event";
		if (currentSeqID == 0)
		{
			currentSeqID = fragment_datasets_["sequenceID"]->readOne<uint64_t>(fragmentIndex_);
			TLOG(8) << "readNextEvent: Setting current Sequence ID to " << currentSeqID;
		}

		auto sequence_id = fragment_datasets_["sequenceID"]->readOne<uint64_t>(fragmentIndex_);
		if (sequence_id != currentSeqID)
		{
			TLOG(8) << "readNextEvent: Current sequence ID is " << currentSeqID << ", next Fragment sequence ID is " << sequence_id << ", leaving read loop";
			break;
		}

		auto payloadRowSize = fragment_datasets_["payload"]->getRowSize();

		auto type = fragment_datasets_["type"]->readOne<uint8_t>(fragmentIndex_);
		auto size_words = fragment_datasets_["size"]->readOne<uint64_t>(fragmentIndex_);
		auto index = fragment_datasets_["index"]->readOne<uint64_t>(fragmentIndex_);
		artdaq::Fragment frag(size_words - artdaq::detail::RawFragmentHeader::num_words());

		TLOG(8) << "readNextEvent: Fragment has size " << size_words << ", payloadRowSize is " << payloadRowSize;
		auto thisRowSize = size_words > payloadRowSize ? payloadRowSize : size_words;
		auto payloadvec = fragment_datasets_["payload"]->read<artdaq::RawDataType>(fragmentIndex_);
		memcpy(frag.headerBegin(), &(payloadvec[0]), thisRowSize * sizeof(artdaq::RawDataType));

		TLOG(8) << "readNextEvent: First words of Payload: 0x" << std::hex << payloadvec[0] << " 0x" << std::hex << payloadvec[1] << " 0x" << std::hex << payloadvec[2] << " 0x" << std::hex << payloadvec[3] << " 0x" << std::hex << payloadvec[4];
		TLOG(8) << "readNextEvent: First words of Fragment: 0x" << std::hex << *frag.headerBegin() << " 0x" << std::hex << *(frag.headerBegin() + 1) << " 0x" << std::hex << *(frag.headerBegin() + 2) << " 0x" << std::hex << *(frag.headerBegin() + 3) << " 0x" << std::hex << *(frag.headerBegin() + 4);

		while (index + payloadRowSize < size_words)
		{
			TLOG(8) << "readNextEvent: Retrieving additional payload row, index of previous row " << index << ", payloadRowSize " << payloadRowSize << ", fragment size words " << size_words;
			fragmentIndex_++;
			index = fragment_datasets_["index"]->readOne<size_t>(fragmentIndex_);
			payloadvec = fragment_datasets_["payload"]->read<artdaq::RawDataType>(fragmentIndex_);

			auto thisRowSize = index + payloadRowSize < size_words ? payloadRowSize : size_words - index;
			memcpy(frag.headerBegin() + index, &(payloadvec[0]), thisRowSize * sizeof(artdaq::RawDataType));
		}

		if (output.count(type) == 0u)
		{
			output[type] = std::make_unique<artdaq::Fragments>();
		}
		TLOG(8) << "readNextEvent: Adding Fragment to event map; type=" << type << ", frag size " << frag.size();
		output[type]->push_back(frag);

		fragmentIndex_++;
	}

	TLOG(TLVL_TRACE) << "readNextEvent END output.size() = " << output.size();
	return output;
}
std::unique_ptr<artdaq::detail::RawEventHeader> artdaq::hdf5::HighFiveNtupleDataset::getEventHeader(artdaq::Fragment::sequence_id_t const& seqID)
{
	TLOG(TLVL_TRACE) << "getEventHeader BEGIN";
	artdaq::Fragment::sequence_id_t sequence_id = 0;
	auto numHeaders = event_datasets_["sequenceID"]->getDatasetSize();

	TLOG(9) << "getEventHeader: Searching for matching header row";
	while (sequence_id != seqID && headerIndex_ < numHeaders)
	{
		sequence_id = event_datasets_["sequenceID"]->readOne<uint64_t>(headerIndex_);
		if (sequence_id != seqID)
		{
			headerIndex_++;
		}
	}
	if (headerIndex_ >= numHeaders)
	{
		return nullptr;
	}

	TLOG(9) << "getEventHeader: Matching header found. Populating output";
	auto runID = event_datasets_["run_id"]->readOne<uint32_t>(headerIndex_);
	auto subrunID = event_datasets_["subrun_id"]->readOne<uint32_t>(headerIndex_);
	auto eventID = event_datasets_["event_id"]->readOne<uint32_t>(headerIndex_);
	auto timestamp = event_datasets_["timestamp"]->readOne<uint64_t>(headerIndex_);

	artdaq::detail::RawEventHeader hdr(runID, subrunID, eventID, sequence_id, timestamp);
	hdr.is_complete = (event_datasets_["is_complete"]->readOne<uint8_t>(headerIndex_) != 0u);

	TLOG(TLVL_TRACE) << "getEventHeader END";
	return std::make_unique<artdaq::detail::RawEventHeader>(hdr);
}
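// Note (editorial): headerIndex_ is a data member rather than a local variable, so the search above
// resumes where the previous call left off instead of rescanning the dataset from the beginning;
// callers therefore see the best performance when they request headers in stored order.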
// Interface summary (from the class documentation):
//   HighFiveNtupleDataset(fhicl::ParameterSet const& ps)
//       HighFiveNtupleDataset Constructor.
//   virtual ~HighFiveNtupleDataset() noexcept
//       HighFiveNtupleDataset Destructor.
//   void insertOne(Fragment const& frag) override
//       Insert a Fragment into the Dataset (write it to the HDF5 file).
//   void insertHeader(detail::RawEventHeader const& hdr) override
//       Insert a RawEventHeader into the Dataset (write it to the HDF5 file).
//   std::unordered_map<artdaq::Fragment::type_t, std::unique_ptr<artdaq::Fragments>> readNextEvent() override
//       Read the next event from the Dataset (HDF5 file).
//   std::unique_ptr<artdaq::detail::RawEventHeader> getEventHeader(artdaq::Fragment::sequence_id_t const&) override
//       Read a RawEventHeader from the Dataset (HDF5 file).
//   FragmentDatasetMode mode_
//       Mode of this FragmentDataset, either FragmentDatasetMode::Write or FragmentDatasetMode::Read,
//       inherited from FragmentDataset, the base class that defines methods for reading and writing
//       HDF5 files via implementation plugins.
// HighFiveNtupleDataset is an implementation of FragmentDataset using the HighFive backend.