// Implementation of FragmentNtuple: writes artdaq Fragments and event
// headers to an HDF5 file via hep-hpc ntuples.
2 #define TRACE_NAME "FragmentNtuple"
4 #include "artdaq-demo-hdf5/HDF5/hep-hpc/FragmentNtuple.hh"
// The hep-hpc headers trigger -Wmissing-braces; suppress the warning just
// for these two includes.
6 #pragma GCC diagnostic push
7 #pragma GCC diagnostic ignored "-Wmissing-braces"
8 #include "hep_hpc/hdf5/make_column.hpp"
9 #include "hep_hpc/hdf5/make_ntuple.hpp"
10 #pragma GCC diagnostic pop
// Compile-time switch: when non-zero, each dataset is created with gzip
// (deflate) level-7 compression via H5Pset_deflate; when zero, the
// *_PROPERTIES macros expand to nothing (no extra dataset properties).
// NOTE(review): the `#if DO_COMPRESSION` / `#else` / `#endif` lines that
// select between the two macro definitions below appear to be missing from
// this rendering of the file -- confirm against the original source.
12 #define DO_COMPRESSION 0
14 #define SCALAR_PROPERTIES \
16 hep_hpc::hdf5::PropertyList{H5P_DATASET_CREATE}(&H5Pset_deflate, 7u) \
18 #define ARRAY_PROPERTIES \
20 hep_hpc::hdf5::PropertyList{H5P_DATASET_CREATE}(&H5Pset_deflate, 7u) \
23 #define SCALAR_PROPERTIES \
25 #define ARRAY_PROPERTIES \
// Constructor taking an already-open hep_hpc::hdf5::File (signature line not
// visible in this rendering).  Member-initializer list: sets the per-row
// payload chunking and creates the two output ntuples.
// nWordsPerRow_: payload words stored per ntuple row (FHiCL parameter
// "nWordsPerRow", default 10240); insertOne splits larger fragments across
// multiple rows.
32 , nWordsPerRow_(ps.get<size_t>(
"nWordsPerRow", 10240))
// "Fragments" ntuple: one row per fragment slice, with a fixed-width
// payload column of nWordsPerRow_ RawDataType words.
33 , fragments_(hep_hpc::hdf5::make_ntuple({file,
"Fragments"},
35 hep_hpc::hdf5::make_scalar_column<uint64_t>(
"sequenceID", SCALAR_PROPERTIES),
36 hep_hpc::hdf5::make_scalar_column<uint16_t>(
"fragmentID", SCALAR_PROPERTIES),
37 hep_hpc::hdf5::make_scalar_column<uint64_t>(
"timestamp", SCALAR_PROPERTIES),
38 hep_hpc::hdf5::make_scalar_column<uint8_t>(
"type", SCALAR_PROPERTIES),
39 hep_hpc::hdf5::make_scalar_column<uint64_t>(
"size", SCALAR_PROPERTIES),
// "index": word offset of this row's payload slice within the fragment
// (see insertOne).
40 hep_hpc::hdf5::make_scalar_column<uint64_t>(
"index", SCALAR_PROPERTIES),
41 hep_hpc::hdf5::make_column<artdaq::RawDataType, 1>(
"payload", nWordsPerRow_, ARRAY_PROPERTIES)))
// "EventHeaders" ntuple is created in the same file as fragments_.
43 , eventHeaders_(hep_hpc::hdf5::make_ntuple({fragments_.file(),
"EventHeaders"},
45 hep_hpc::hdf5::make_scalar_column<uint32_t>(
"run_id", SCALAR_PROPERTIES),
46 hep_hpc::hdf5::make_scalar_column<uint32_t>(
"subrun_id", SCALAR_PROPERTIES),
47 hep_hpc::hdf5::make_scalar_column<uint32_t>(
"event_id", SCALAR_PROPERTIES),
48 hep_hpc::hdf5::make_scalar_column<uint64_t>(
"sequenceID", SCALAR_PROPERTIES),
49 hep_hpc::hdf5::make_scalar_column<uint8_t>(
"is_complete", SCALAR_PROPERTIES)))
52 TLOG(TLVL_DEBUG) <<
"FragmentNtuple Constructor (file) START";
// This implementation is write-only: log an error (but do not throw) if it
// was configured for reading.
// NOTE(review): the message says "ToyFragmentDataset" but this class is
// FragmentNtuple -- looks like a copy/paste slip; confirm before changing
// the runtime string.
53 if (mode_ == FragmentDatasetMode::Read)
55 TLOG(TLVL_ERROR) <<
"ToyFragmentDataset configured in read mode but is not capable of reading!";
57 TLOG(TLVL_DEBUG) <<
"FragmentNtuple Constructor (file) END";
// Constructor taking only a ParameterSet (signature line not visible in this
// rendering).  Unlike the File overload, this one opens/creates its own HDF5
// file named by FHiCL parameter "fileName" (default "fragments.hdf5").
// Dataset mode comes from FHiCL parameter "mode" (default "write").
62 : FragmentDataset(ps, ps.get<std::string>(
"mode",
"write"))
// Payload words per ntuple row (FHiCL "nWordsPerRow", default 10240).
63 , nWordsPerRow_(ps.get<size_t>(
"nWordsPerRow", 10240))
// "Fragments" ntuple: same schema as in the File-based constructor.
64 , fragments_(hep_hpc::hdf5::make_ntuple({ps.get<std::string>(
"fileName",
"fragments.hdf5"),
"Fragments"},
66 hep_hpc::hdf5::make_scalar_column<uint64_t>(
"sequenceID", SCALAR_PROPERTIES),
67 hep_hpc::hdf5::make_scalar_column<uint16_t>(
"fragmentID", SCALAR_PROPERTIES),
68 hep_hpc::hdf5::make_scalar_column<uint64_t>(
"timestamp", SCALAR_PROPERTIES),
69 hep_hpc::hdf5::make_scalar_column<uint8_t>(
"type", SCALAR_PROPERTIES),
70 hep_hpc::hdf5::make_scalar_column<uint64_t>(
"size", SCALAR_PROPERTIES),
// "index": word offset of this row's payload slice within the fragment.
71 hep_hpc::hdf5::make_scalar_column<uint64_t>(
"index", SCALAR_PROPERTIES),
72 hep_hpc::hdf5::make_column<artdaq::RawDataType, 1>(
"payload", nWordsPerRow_, ARRAY_PROPERTIES)))
// "EventHeaders" ntuple shares the file just opened for fragments_.
74 , eventHeaders_(hep_hpc::hdf5::make_ntuple({fragments_.file(),
"EventHeaders"},
76 hep_hpc::hdf5::make_scalar_column<uint32_t>(
"run_id", SCALAR_PROPERTIES),
77 hep_hpc::hdf5::make_scalar_column<uint32_t>(
"subrun_id", SCALAR_PROPERTIES),
78 hep_hpc::hdf5::make_scalar_column<uint32_t>(
"event_id", SCALAR_PROPERTIES),
79 hep_hpc::hdf5::make_scalar_column<uint64_t>(
"sequenceID", SCALAR_PROPERTIES),
80 hep_hpc::hdf5::make_scalar_column<uint8_t>(
"is_complete", SCALAR_PROPERTIES)))
83 TLOG(TLVL_DEBUG) <<
"FragmentNtuple Constructor START";
// Write-only implementation: log an error (but do not throw) for Read mode.
// NOTE(review): message names "ToyFragmentDataset" -- likely copy/paste;
// confirm before changing the runtime string.
84 if (
mode_ == FragmentDatasetMode::Read)
86 TLOG(TLVL_ERROR) <<
"ToyFragmentDataset configured in read mode but is not capable of reading!";
88 TLOG(TLVL_DEBUG) <<
"FragmentNtuple Constructor END";
// Destructor body (signature not visible in this rendering): only logs
// entry/exit.  Flushing/closing the HDF5 file is presumably handled by the
// ntuple members' own destructors -- confirm against hep-hpc documentation.
93 TLOG(TLVL_DEBUG) <<
"FragmentNtuple Destructor START";
95 TLOG(TLVL_DEBUG) <<
"FragmentNtuple Destructor END";
// insertOne body (signature not visible in this rendering): writes one
// artdaq::Fragment into the "Fragments" ntuple, splitting its words into
// rows of nWordsPerRow_; the "index" column records each row's word offset.
// Assumes frag.size() is a count of RawDataType words starting at
// frag.headerBegin() -- TODO confirm against the artdaq::Fragment API.
100 TLOG(TLVL_TRACE) <<
"FragmentNtuple::insertOne BEGIN";
// Walk the fragment in nWordsPerRow_-word strides.
102 for (
size_t ii = 0; ii < frag.size(); ii += nWordsPerRow_)
// A full row's worth of words remains: insert directly from the fragment's
// own buffer, no intermediate copy.
104 if (ii + nWordsPerRow_ <= frag.size())
106 fragments_.insert(frag.sequenceID(), frag.fragmentID(), frag.timestamp(), frag.type(),
107 frag.size(), ii, &frag.headerBegin()[ii]);
// Final partial row: copy the tail into a zero-initialized buffer so the
// fixed-width payload column is zero-padded past the fragment's end.
111 std::vector<artdaq::RawDataType> words(nWordsPerRow_, 0);
112 std::copy(frag.headerBegin() + ii, frag.dataEnd(), words.begin());
113 fragments_.insert(frag.sequenceID(), frag.fragmentID(), frag.timestamp(), frag.type(),
114 frag.size(), ii, &words[0]);
117 TLOG(TLVL_TRACE) <<
"FragmentNtuple::insertOne END";
// insertHeader body (signature not visible in this rendering): writes one
// RawEventHeader as a row of the "EventHeaders" ntuple (run/subrun/event
// ids, sequence id, completeness flag).
122 TLOG(TLVL_TRACE) <<
"FragmentNtuple::insertHeader: Writing header to eventHeaders_ group";
123 eventHeaders_.insert(hdr.run_id, hdr.subrun_id, hdr.event_id, hdr.sequence_id, hdr.is_complete);
void insertOne(artdaq::Fragment const &frag) override
Insert a Fragment into the Fragment Ntuple Dataset (write it to the HDF5 file)
Implementation of FragmentDataset using hep_hpc Ntuples.
virtual ~FragmentNtuple()
FragmentNtuple Destructor.
FragmentNtuple(fhicl::ParameterSet const &ps, hep_hpc::hdf5::File const &file)
FragmentNtuple Constructor with input hep_hpc::hdf5::File.
void insertHeader(artdaq::detail::RawEventHeader const &hdr) override
Insert a RawEventHeader into the Event Header Ntuple Dataset (write it to the HDF5 file).
FragmentDatasetMode mode_
Mode of this FragmentDataset, either FragmentDatasetMode::Write or FragmentDatasetMode::Read.
Base class that defines methods for reading and writing to HDF5 files via various implementation plugins.