Merge pull request #268 from DUNE-DAQ/aeoranday/tpm-hdf5-usage
TPM HDF5 Usage
aeoranday authored Jan 26, 2024
2 parents edda16b + 8925f5f commit 879ead3
Showing 4 changed files with 150 additions and 12 deletions.
2 changes: 2 additions & 0 deletions CMakeLists.txt
@@ -35,6 +35,7 @@ daq_add_library(TokenManager.cpp LivetimeCounter.cpp
iomanager::iomanager
detdataformats::detdataformats
trgdataformats::trgdataformats
hdf5libs::hdf5libs
Boost::iostreams # Boost::iostreams comes in via readoutlibs
detchannelmaps::detchannelmaps)

@@ -102,6 +103,7 @@ daq_add_application( set_serialization_speed set_serialization_speed.cxx TEST LI
daq_add_application( taset_serialization taset_serialization.cxx TEST LINK_LIBRARIES trigger)
daq_add_application( print_trigger_type print_trigger_type.cxx TEST LINK_LIBRARIES trigger hdf5libs::hdf5libs CLI11::CLI11)
daq_add_application( print_ds_fragments print_ds_fragments.cxx TEST LINK_LIBRARIES trigger hdf5libs::hdf5libs CLI11::CLI11)
daq_add_application( generate_tpset_from_hdf5 generate_tpset_from_hdf5.cxx TEST LINK_LIBRARIES trigger hdf5libs::hdf5libs CLI11::CLI11)

##############################################################################
# Unit Tests
42 changes: 30 additions & 12 deletions plugins/TriggerPrimitiveMaker.cpp
@@ -100,7 +100,7 @@ TriggerPrimitiveMaker::do_start(const nlohmann::json& args)
std::ref(stream.tpset_sink),
earliest_timestamp_time));
}
for (size_t i=0; i < m_threads.size(); ++i) {
for (size_t i = 0; i < m_threads.size(); ++i) {
std::string name("replay");
name += std::to_string(i);
pthread_setname_np(m_threads[i]->native_handle(), name.c_str());
@@ -133,27 +133,47 @@ TriggerPrimitiveMaker::do_scrap(const nlohmann::json& /*args*/)
std::vector<TPSet>
TriggerPrimitiveMaker::read_tpsets(std::string filename, int element)
{
std::ifstream file(filename);
if (!file || file.bad()) {
throw BadTPInputFile(ERS_HERE, get_name(), filename);
}

TriggerPrimitive tp;
TPSet tpset;
std::vector<TPSet> tpsets;

uint64_t prev_tpset_number = 0; // NOLINT(build/unsigned)
uint32_t seqno = 0; // NOLINT(build/unsigned)
uint64_t old_time_start = 0; // NOLINT(build/unsigned)

// Prepare input file
std::unique_ptr<hdf5libs::HDF5RawDataFile> input_file = std::make_unique<hdf5libs::HDF5RawDataFile>(filename);

// Check that the file is a TimeSlice type
if (!input_file->is_timeslice_type()) {
throw BadTPInputFile(ERS_HERE, get_name(), filename);
}

std::vector<std::string> fragment_paths = input_file->get_all_fragment_dataset_paths();

// Read in the file and place the TPs in TPSets. TPSets have time
// boundaries ( n*tpset_time_width + tpset_time_offset ), and TPs are placed
// in TPSets based on the TP start time
//
// This loop assumes the input file is sorted by TP start time
while (file >> tp.time_start >> tp.time_over_threshold >> tp.time_peak >> tp.channel >> tp.adc_integral >>
tp.adc_peak >> tp.detid >> tp.type) {
if (tp.time_start >= old_time_start) {
for (std::string& fragment_path : fragment_paths) {
std::unique_ptr<daqdataformats::Fragment> frag = input_file->get_frag_ptr(fragment_path);
// Make sure this fragment is a TriggerPrimitive
if (frag->get_fragment_type() != daqdataformats::FragmentType::kTriggerPrimitive)
continue;
if (frag->get_element_id().subsystem != daqdataformats::SourceID::Subsystem::kTrigger)
continue;

// Prepare TP buffer
size_t num_tps = frag->get_data_size() / sizeof(trgdataformats::TriggerPrimitive);

trgdataformats::TriggerPrimitive* tp_array = static_cast<trgdataformats::TriggerPrimitive*>(frag->get_data());

for (size_t i(0); i < num_tps; i++) {
auto& tp = tp_array[i];
if (tp.time_start < old_time_start) {
ers::warning(UnsortedTP(ERS_HERE, get_name(), tp.time_start));
continue;
}
// NOLINTNEXTLINE(build/unsigned)
uint64_t current_tpset_number = (tp.time_start + m_conf.tpset_time_offset) / m_conf.tpset_time_width;
old_time_start = tp.time_start;
@@ -179,8 +199,6 @@ TriggerPrimitiveMaker::read_tpsets(std::string filename, int element)
tpset.objects.clear();
}
tpset.objects.push_back(tp);
} else {
ers::warning(UnsortedTP(ERS_HERE, get_name(), tp.time_start));
}
}
if (!tpset.objects.empty()) {
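
For reference, a minimal standalone sketch (not part of this commit) of the TPSet bucketing arithmetic used in read_tpsets above, evaluated with the same arbitrary values that the new test app below uses (tpset_time_width = 10, tpset_time_offset = 5):

#include <cstdint>
#include <initializer_list>
#include <iostream>

int main()
{
  const uint64_t tpset_time_width = 10;  // arbitrary, matches the test app
  const uint64_t tpset_time_offset = 5;  // arbitrary, matches the test app
  // TPSet boundaries sit at n*tpset_time_width + tpset_time_offset: 5, 15, 25, ...
  for (uint64_t time_start : { 4, 5, 12, 14, 15, 17 }) {
    const uint64_t tpset_number = (time_start + tpset_time_offset) / tpset_time_width;
    std::cout << "time_start " << time_start << " -> TPSet number " << tpset_number << "\n";
  }
  // Prints 0, 1, 1, 1, 2, 2: TPs with time_start in [5, 15) share TPSet number 1, and a
  // TP at 15 or later crosses a boundary, so the current TPSet is pushed and a new one begins.
  return 0;
}
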
2 changes: 2 additions & 0 deletions plugins/TriggerPrimitiveMaker.hpp
@@ -13,6 +13,8 @@
#include "trigger/triggerprimitivemaker/Nljs.hpp"

#include "appfwk/DAQModule.hpp"
#include "daqdataformats/SourceID.hpp"
#include "hdf5libs/HDF5RawDataFile.hpp"
#include "iomanager/Sender.hpp"
#include "triggeralgs/TriggerPrimitive.hpp"
#include "triggeralgs/Types.hpp"
116 changes: 116 additions & 0 deletions test/apps/generate_tpset_from_hdf5.cxx
@@ -0,0 +1,116 @@
/**
* @file generate_tpset_from_hdf5.cxx Read TP fragments from file and generate a TPSet vector.
* Matches code within TriggerPrimitiveMaker.
*
* This is part of the DUNE DAQ Application Framework, copyright 2020.
* Licensing/copyright details are in the COPYING file that you should have
* received with this code.
*/
#include "CLI/CLI.hpp"

#include "../../plugins/TriggerPrimitiveMaker.hpp"
#include "trgdataformats/TriggerPrimitive.hpp"

#include "daqdataformats/Fragment.hpp"
#include "daqdataformats/FragmentHeader.hpp"
#include "daqdataformats/SourceID.hpp"
#include "daqdataformats/TriggerRecordHeader.hpp"
#include "daqdataformats/Types.hpp"
#include "hdf5libs/HDF5RawDataFile.hpp"

#include <iostream>

int
main(int argc, char** argv)
{
CLI::App app{ "App description" };

std::string filename;
app.add_option("-f,--file", filename, "Input HDF5 file");

CLI11_PARSE(app, argc, argv);

dunedaq::trigger::TPSet tpset;
std::vector<dunedaq::trigger::TPSet> tpsets;

uint64_t prev_tpset_number = 0; // NOLINT(build/unsigned)
uint32_t seqno = 0; // NOLINT(build/unsigned)
uint64_t old_time_start = 0; // NOLINT(build/unsigned)
uint32_t tpset_time_width = 10; // Arbitrary
uint32_t tpset_time_offset = 5; // Arbitrary
uint16_t element = 0; // Arbitrary
uint32_t total_tps = 0;

// Prepare input file
std::unique_ptr<dunedaq::hdf5libs::HDF5RawDataFile> input_file =
std::make_unique<dunedaq::hdf5libs::HDF5RawDataFile>(filename);

// Check that the file is a TimeSlice type
if (!input_file->is_timeslice_type()) {
std::cout << "Not a timeslice type.\n";
return 1;
}

std::vector<std::string> fragment_paths = input_file->get_all_fragment_dataset_paths();

// Read in the file and place the TPs in TPSets. TPSets have time
// boundaries ( n*tpset_time_width + tpset_time_offset ), and TPs are placed
// in TPSets based on the TP start time
//
// This loop assumes the input file is sorted by TP start time
for (std::string& fragment_path : fragment_paths) {
std::unique_ptr<dunedaq::daqdataformats::Fragment> frag = input_file->get_frag_ptr(fragment_path);
// Make sure this fragment is a TriggerPrimitive
if (frag->get_fragment_type() != dunedaq::daqdataformats::FragmentType::kTriggerPrimitive)
continue;
if (frag->get_element_id().subsystem != dunedaq::daqdataformats::SourceID::Subsystem::kTrigger)
continue;

// Prepare TP buffer
size_t num_tps = frag->get_data_size() / sizeof(dunedaq::trgdataformats::TriggerPrimitive);
total_tps += num_tps;

dunedaq::trgdataformats::TriggerPrimitive* tp_array =
static_cast<dunedaq::trgdataformats::TriggerPrimitive*>(frag->get_data());

for (size_t i(0); i < num_tps; i++) {
auto& tp = tp_array[i];
if (tp.time_start < old_time_start) {
std::cout << "TPs are unsorted.\n";
return 1;
}
// NOLINTNEXTLINE(build/unsigned)
uint64_t current_tpset_number = (tp.time_start + tpset_time_offset) / tpset_time_width;
old_time_start = tp.time_start;

// If we crossed a time boundary, push the current TPSet and reset it
if (current_tpset_number > prev_tpset_number) {
tpset.start_time = prev_tpset_number * tpset_time_width + tpset_time_offset;
tpset.end_time = tpset.start_time + tpset_time_width;
tpset.seqno = seqno;
++seqno;

// 12-Jul-2021, KAB: setting origin fields from configuration
tpset.origin.id = element;

tpset.type = dunedaq::trigger::TPSet::Type::kPayload;

if (!tpset.objects.empty()) {
// We don't send empty TPSets, so there's no point creating them
tpsets.push_back(tpset);
}
prev_tpset_number = current_tpset_number;

tpset.objects.clear();
}
tpset.objects.push_back(tp);
}
}
if (!tpset.objects.empty()) {
// We don't send empty TPSets, so there's no point creating them
tpsets.push_back(tpset);
}
std::cout << "Read " << total_tps << " TPs into " << tpsets.size() << " TPSets, from file " << filename << std::endl;

return 0;
}
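
Usage note (not part of the commit): the app defines a single option, -f/--file, for the input HDF5 file, and it exits with an error if the file is not a TimeSlice type or if the TPs are unsorted. A hypothetical invocation, assuming the binary has been built and with an illustrative file name only:

generate_tpset_from_hdf5 --file tpstream_example.hdf5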
