diff --git a/.readthedocs.yml b/.readthedocs.yaml similarity index 62% rename from .readthedocs.yml rename to .readthedocs.yaml index b22a1bc2c69..dcda1939df0 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yaml @@ -8,8 +8,13 @@ version: 2 mkdocs: configuration: mkdocs.yml +# Set the version of Python and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.8" + # Set the version of Python and requirements required to build your docs python: - version: 3.8 - install: - - requirements: docs/requirements.txt + install: + - requirements: docs/requirements.txt diff --git a/bin/mkdocs_skeleton.yml b/bin/mkdocs_skeleton.yml index 84740a148de..30ff1447093 100644 --- a/bin/mkdocs_skeleton.yml +++ b/bin/mkdocs_skeleton.yml @@ -122,12 +122,15 @@ nav: - Table of Contents: - Overview: - README.md + - Release Notes: + - release_notes.md - Toolset: - daq-assettools: - daq-buildtools: - daq-cmake: - daq-release: - - daq-systemtest: + - daqpytools: + - daqsystemtest: - integrationtest: - styleguide: - Core: @@ -137,34 +140,45 @@ nav: - logging: - utilities: - Readout: + - asiolibs: + - ctbmodules: - daqdataformats: + - datahandlinglibs: - detchannelmaps: - detdataformats: - - dtpcontrols: - - dtpctrllibs: + - dpdklibs: + - fddetdataformats: - fdreadoutlibs: + - fdreadoutmodules: - flxlibs: - - lbrulibs: - - readoutlibs: - - readoutmodules: + - hermesmodules: + - snbmodules: + - tdemodules: - wibmod: - Control: + - appmodel: + - confmodel: + - connectivityserver: - daqconf: - - nanorc: + - dbe: + - drunc: - restcmd: + - runconftools: + - runconf-ui: - Dataflow (logical): - dfmessages: - dfmodules: - hdf5libs: - timing: - timinglibs: + - tpglibs: + - trgtools: - trigger: - Dataflow (physical): - iomanager: - ipm: - serialization: - Monitoring: - - dqm: - erskafka: - kafkaopmon: - opmonlib: diff --git a/bin/the_final_markdown.sh b/bin/the_final_markdown.sh index 565d0206c96..721a90d5d62 100755 --- a/bin/the_final_markdown.sh +++ b/bin/the_final_markdown.sh @@ 
-5,7 +5,7 @@ here=$(cd $(dirname $(readlink -f ${BASH_SOURCE})) && pwd) # Reverse alphabetical order # for package development themselves -package_list="wibmod utilities trigger timinglibs timing styleguide serialization restcmd readoutmodules readoutlibs rcif rawdatautils opmonlib ndreadoutlibs nanorc kafkaopmon logging listrev lbrulibs hdf5libs ipm iomanager integrationtest flxlibs fdreadoutlibs erskafka ers dtpctrllibs dtpcontrols dqm dfmodules dfmessages detdataformats detchannelmaps daqdataformats daqconf daq-systemtest daq-release daq-cmake daq-buildtools daq-assettools cmdlib appfwk" +package_list="wibmod utilities trigger trgtools tpglibs timinglibs timing tdemodules styleguide snbmodules serialization runconf-ui runconftools restcmd rawdatautils opmonlib kafkaopmon logging listrev hermesmodules hdf5libs ipm iomanager integrationtest flxlibs fdreadoutmodules fdreadoutlibs fddetdataformats erskafka ers drunc dpdklibs dfmodules dfmessages detdataformats detchannelmaps dbe datahandlinglibs daqdataformats daqconf daqsystemtest daq-release daqpytools daq-cmake daq-buildtools daq-assettools ctbmodules connectivityserver confmodel cmdlib asiolibs appfwk appmodel" mkdocs_yml="$here/../mkdocs.yml" @@ -115,11 +115,11 @@ for package in $package_list ; do # themselves being updated if [[ "$package" =~ "daq-buildtools" ]]; then - git checkout dunedaq-v3.2.2_for_docs + git checkout fddaq-v5.5.0_for_docs elif [[ "$package" =~ "daq-cmake" ]]; then - git checkout dunedaq-v3.2.0_for_docs + git checkout v3.2.1 else - git checkout develop + git checkout coredaq-v5.5.0 || git checkout fddaq-v5.5.0 fi echo $tmpdir/$package @@ -151,9 +151,9 @@ for package in $package_list ; do fi for mdfile in $( find . -mindepth 2 -type f -not -type l -not -regex ".*\.git.*" -not -regex "\./docs.*" -name "*.md" ); do - reldir=$( echo $mdfile | sed -r 's!(.*)/.*!\1!' 
) - mkdir -p $packages_dir/$package/$reldir - cp -p $mdfile $packages_dir/$package/$reldir + reldir=$( echo "$mdfile" | sed -r 's!(.*)/.*!\1!' ) + mkdir -p "$packages_dir/$package/$reldir" + cp -p "$mdfile" "$packages_dir/$package/$reldir" if [[ "$?" != "0" ]]; then echo "There was a problem copying $mdfile to $packages_dir/$package/$reldir in $PWD; exiting..." >&2 exit 3 diff --git a/docs/README.md b/docs/README.md index 8e247f586df..1a12bd0db49 100644 --- a/docs/README.md +++ b/docs/README.md @@ -18,10 +18,12 @@ To learn about the C++ coding guidelines which DUNE DAQ package developers shoul To learn how to run integration tests within our framework, go [here](packages/integrationtest/README.md) -To learn about how to run even more comprehensive tests (particularly relevant during a DUNE DAQ release period), read about the [daq-systemtest package](packages/daq-systemtest/README.md) +To learn about how to run even more comprehensive tests (particularly relevant during a DUNE DAQ release period), read about the [daqsystemtest package](packages/daqsystemtest/README.md) To learn how to keep track of assets (files, etc. which affect the behavior of the DAQ but aren't part of a standard software package), go [here](packages/daq-assettools/README.md) +Tools to simplify DAQ development in Python can be found in [daqpytools](packages/daqpytools/README.md) + -------------- For the other packages, please click on one of the links below. To learn how to edit a package's documentation, click [here](editing_package_documentation.md). Packages marked with an asterix don't yet have any official documentation; please see their Issues page to remedy this. @@ -40,40 +42,56 @@ For the other packages, please click on one of the links below. 
To learn how to ### Readout +[asiolibs](packages/asiolibs/README.md) _Boost.Asio-based socket reader plugin for low-bandwidth devices_ + +[ctbmodules](packages/ctbmodules/README.md) _Modules for the Central Trigger Board hardware_ + [daqdataformats](packages/daqdataformats/README.md) _DAQ data formats_ +[datahandlinglibs](packages/datahandlinglibs/README.md) _Tools for constructing readout-focused processes_ + [detchannelmaps](packages/detchannelmaps/README.md) _Channel maps for the detectors_ -[detdataformats](packages/detdataformats/README.md) _Data formats for the detectors_ +[detdataformats](packages/detdataformats/README.md) _General-purpose data formats and related tools_ -[dtpctrllibs](packages/dtpctrllibs/README.md) _DAQ modules for controlling Trigger Primitive generation firmware_ +[dpdklibs](packages/dpdklibs/README.md) _Data Plane Development Kit software and utilities_ -[dtpcontrols](packages/dtpcontrols/README.md) _Python tools for control of the Trigger Primitive firmware_ +[fddetdataformats](packages/fddetdataformats/README.md) _Bitfields of far detector raw data and related tools_ [fdreadoutlibs](packages/fdreadoutlibs/README.md) _Classes for working with far detector data (WIB, SSP, etc.)_ -[flxlibs](packages/flxlibs/README.md) _DAQModules, utilities, and scripts for Upstream FELIX Readout Software_ +[fdreadoutmodules](packages/fdreadoutmodules/README.md) _Readout plugin collection for the far detector_ -[lbrulibs](packages/lbrulibs/README.md) _DAQModules, utilities, and scripts for DUNE-ND Upstream DAQ Low Bandwidth Readout Unit_ +[flxlibs](packages/flxlibs/README.md) _DAQModules, utilities, and scripts for Upstream FELIX Readout Software_ -[ndreadoutlibs](packages/ndreadoutlibs/README.md) _Classes for working with near detector data (e.g. 
PACMAN)_ +[hermesmodules](packages/hermesmodules/README.md) _Modules for the Hermes core_ -[readoutlibs](packages/readoutlibs/README.md) _Base classes for construction of readout-related DAQModules_ +[snbmodules](packages/snbmodules/README.md) _Modules for supernova detection_ -[readoutmodules](packages/readoutmodules/README.md) _DAQModules for constructing readout-focused processes_ +[tdemodules](packages/tdemodules/README.md) _Modules for controlling the Top Drift Electronics' Advanced Mezzanine Cards (AMC)_ [wibmod](packages/wibmod/README.md) _WIB configuration and monitoring interface_ ### Control +[appmodel](packages/appmodel/README.md) _Schema for DAQ configuration of readout, dataflow and trigger applications_ + +[confmodel](packages/confmodel/README.md) _A core schema for DAQ configuration_ + +[connectivityserver](packages/connectivityserver/README.md) _Serves connection information to DAQ applications_ + [daqconf](packages/daqconf/README.md) _application to read out Felix data and store it in HDF5 files on disk_ -[nanorc](packages/nanorc/README.md) _Not ANOther Run Control_ +[dbe](packages/dbe/README.md) _A GUI interface for the OKS-based configuration design_ -[* rcif](packages/rcif/README.md) _run control related_ +[drunc](packages/drunc/README.md) _Run control infrastructure for a distributed DAQ system_ [restcmd](packages/restcmd/README.md) _HTTP REST backend based CommandFacility_ +[runconftools](packages/runconftools/README.md) _Constructs configurations from a base of ehn1 configurations_ + +[runconf-ui](packages/runconf-ui/README.md) _An interface which lets shifters enable/disable elements of the detector_ + ### Dataflow (logical) [dfmessages](packages/dfmessages/README.md) _dataflow messages_ @@ -86,6 +104,10 @@ For the other packages, please click on one of the links below. 
To learn how to [timinglibs](packages/timinglibs/README.md) _timing control and monitoring_ +[tpglibs](packages/tpglibs/README.md) _Processes raw waveforms and returns the generated trigger primitives_ + +[trgtools](packages/trgtools/README.md) _trigger emulation and analysis tools_ + [trigger](packages/trigger/README.md) _modules that make up the DUNE FD DAQ trigger system_ ### Dataflow (physical) @@ -98,8 +120,6 @@ For the other packages, please click on one of the links below. To learn how to ### Monitoring -[dqm](packages/dqm/README.md) _Data Quality Monitor_ - [erskafka](packages/erskafka/README.md) _the erskafka plugin_ [kafkaopmon](packages/kafkaopmon/README.md) _converts JSON objects into [Kafka](https://en.wikipedia.org/wiki/Apache_Kafka) messages_ @@ -111,8 +131,3 @@ For the other packages, please click on one of the links below. To learn how to ### Educational [listrev](packages/listrev/README.md) _educational example of DAQModules for new developers_ - ------- - -_Mar-11-2021: For software coordinators only:_ [how to make edits to this webpage](how_to_make_edits.md) - diff --git a/docs/how_to_make_edits.md b/docs/how_to_make_edits.md index 09d6294d42a..ba87d27bd66 100644 --- a/docs/how_to_make_edits.md +++ b/docs/how_to_make_edits.md @@ -1,5 +1,5 @@ -_JCF, Jul-1-2021: The following is currently intended just for members of the Software Coordination group_ +_JCF, Feb-28-2024: The following is currently intended just for members of the Software Coordination group_ # How the official documentation works diff --git a/docs/packages/appfwk/ActionPlans.md b/docs/packages/appfwk/ActionPlans.md new file mode 100755 index 00000000000..9154529678d --- /dev/null +++ b/docs/packages/appfwk/ActionPlans.md @@ -0,0 +1,169 @@ +# Action Plans + +## Overview + +An ActionPlan defines a series of steps consisting of groups of modules, which are executed in response to a command from CCM. 
Groups of modules are defined either by module class or by module instances, and the execution of each step is in parallel by default, but can be changed to serial execution if needed. Each ActionPlan is associated with a FSMCommand object, and is run by the application when it receives the corresponding command. If a command is received and no ActionPlan is defined, the application currently runs a "dummy" ActionPlan consisting of a single step where modules with the command registered are all run in parallel. + +Action Plans allow for much finer-grained control over the execution of a command within an application, allowing for modules that have dependencies on one another to execute their commands correctly. It also introduces parallelization of command execution within each step, which helps with certain time-consuming module commands (e.g. configuring hardware on a link). The current implementation uses std::future objects and a catch-all threading pattern to ensure that errors executing steps within an action plan do not lead to program crashes. + +## Defining an ActionPlan + +ActionPlans are defined in configuration using these objects: + +```XML + + + + + + + + + + + + + + + + + + + +``` + + + +1. ActionPlan relates a set of DAQModule groups to a FSMCommand instance. + + +1. DAQModules can be grouped by type (C++ class) or by Id (module instance reference) + + +1. ActionPlan has an "execution_policy" attribute which sets whether the modules referenced by each step should execute the command in parallel or in series. (Steps are always executed in series, but within each step, modules can receive the command in parallel or again in series.) + +ActionPlans are validated by the application to ensure that every module type has registered methods corresponding to the command linked to the ActionPlan, and that only one ActionPlan is linked to the application for a given command. 
Most DUNE-DAQ applications are SmartDaqApplications, which may generate module instances using predefined rules. This can complicate the usage of DaqModulesGroupById and this mode should be used with caution. Note that FSMCommand objects are usually defined by the CCM and included in a fsm.data.xml OKS database. + +### Example test/config/appfwk.data.xml + +The DAQModuleManager_test unit test defines several ActionPlans used within the test. For example, the "do_stuff" action: + +```XML + + + + + + + + + + + + + + + + + + + +``` + +Here, the FSMCommand is defined in the file because it is a non-standard command used for the purposes of this unit test. Normally, the FSMCommand instance would not be defined in the same location as the ActionPlan that uses it. The Action plan defines a single step where all modules in the "dummymodules_type_group" receive the "stuff" command in parallel (since parallel step execution is the default behavior when execution_policy is not specified). "dummymodules_type_group" groups all modules of type DummyModule. + +Generally, FSMCommands are defined by the CCM group and the basic commands can be found in [fsm.data.xml](https://github.com/DUNE-DAQ/daqsystemtest/blob/develop/config/daqsystemtest/fsm.data.xml), but ActionPlan instances and DaqModulesGroupByType/ID instances are user-defined (examples are in moduleconfs.data.xml, described below). + +The user-defined ActionPlans (such as "stuff" above) are associated with the Application instance as follows: + +```XML + + + + + + + + + + + +``` + +## Notes + + +* DAQModules register their action methods with the DAQModuleManager, and this information is used in validation of the ActionPlans (e.g. that every DAQModule that has registered a command is called by the corresponding ActionPlan). + +* ActionPlans refer to FSMCommand objects as defined by the CCM. New FSMCommands may be added, but should be integrated into the state machine in consultation with CCM experts. 
+ +* Within each step of an ActionPlan, whether executing in series or in parallel, the modules will be called in the order in which they are declared to the Application. In series mode, the future has `wait()` called for each module in the step, and in parallel mode, the futures are all started and the results are collected by a loop which calls `wait()` on each individually. + +## Further Examples + +### https://github.com/DUNE-DAQ/daqsystemtest/blob/develop/config/daqsystemtest/moduleconfs.data.xml#L134 + +```XML + + + + + + + + + + +``` + +This ActionPlan consists of four steps, with each step sending the command to the modules matched by the group in parallel. Therefore: + + +1. the FragmentAggregator will run first (there is only one FA per app), + + +1. when the FA is complete, all of the TP Handlers will receive "start" in parallel + + +1. once they are all complete, then the DataLinkHandlers + + +1. finally the FDFakeReaderModules + +### https://github.com/DUNE-DAQ/daqsystemtest/blob/develop/config/daqsystemtest/moduleconfs.data.xml#L199 + +```XML + + + + + + + + +``` + +This ActionPlan, by contrast, uses the modules-in-series execution policy, so for an application with "ta-handler-01", "ta-handler-02", and "ta-subscriber-01", the ActionPlan will result in: + + +1. (Step 1A) "ta-handler-01" executing "start" + + +1. (Step 1B) once that is complete, "ta-handler-02" will execute "start" + + +1. 
(Step 2) finally "ta-subscriber-01" will receive "start" + + +----- + + +_Last git commit to the markdown source of this page:_ + + +_Author: Eric Flumerfelt_ + +_Date: Mon Jul 21 15:31:54 2025 -0500_ + +_If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/appfwk/issues](https://github.com/DUNE-DAQ/appfwk/issues)_ + diff --git a/docs/packages/appfwk/Daq-Application.md b/docs/packages/appfwk/Daq-Application.md index 23189dcc80e..6068450a78f 100755 --- a/docs/packages/appfwk/Daq-Application.md +++ b/docs/packages/appfwk/Daq-Application.md @@ -24,7 +24,7 @@ daq_application known arguments (additional arguments will be stored and passed # Usage Notes -As of v2.6.0, `daq_application` will seldom have to be called directly, instead the preferred method of starting _dunedaq_ applications will be to use one of the Run Control products, such as `nanorc`. +As of v2.6.0, `daq_application` will seldom have to be called directly, instead the preferred method of starting _dunedaq_ applications will be to use one of the Run Control products, such as `nanorc` or `drunc`. ----- @@ -33,9 +33,9 @@ As of v2.6.0, `daq_application` will seldom have to be called directly, instead _Last git commit to the markdown source of this page:_ -_Author: glehmannmiotto_ +_Author: Marco Roda_ -_Date: Fri Jul 15 15:54:07 2022 +0200_ +_Date: Thu Jul 4 10:47:11 2024 +0200_ _If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/appfwk/issues](https://github.com/DUNE-DAQ/appfwk/issues)_ diff --git a/docs/packages/appfwk/README.md b/docs/packages/appfwk/README.md index 755a2a72d8e..62dfd567583 100644 --- a/docs/packages/appfwk/README.md +++ b/docs/packages/appfwk/README.md @@ -12,7 +12,7 @@ appfwk consists of a generic DAQ application (`daq_application`) which can be co appfwk provides the scaffolding on which all DUNE DAQ software processes can be developed. 
The running DAQ typically consists of multiple distinct processes assigned various tasks: filtering data, requesting it, saving it to storage, etc. There are many different types of process, some of which may not even have been conceived of yet, and it would be cumbersome to recompile multiple different types of process across many packages every time one wanted to change the behavior of the DAQ. To solve this problem, the approach that's been taken is to have a standard DUNE DAQ software process [`daq_application`](Daq-Application.md) which can be configured at runtime by Run Control in order to perform some particular function in the DAQ. -`daq_application` is designed as a flexible container of "DAQ modules" (units of code designed to perform specific tasks) and "connections" (designed to move data between DAQ modules and DAQ applications). These specific tasks can vary widely; they include [producing fake data for testing purposes](https://github.com/DUNE-DAQ/readoutmodules/blob/develop/plugins/FakeCardReader.hpp), [putting data into long term storage](https://github.com/DUNE-DAQ/dfmodules/blob/develop/plugins/DataWriter.hpp), and so forth. DAQ modules will typically execute user-defined functions when receiving standard transitions from Run Control: "conf", "start", etc. appfwk provides the `DAQModule` base class which users should derive their DAQ module class from in their own packages. +`daq_application` is designed as a flexible container of "DAQ modules" (units of code designed to perform specific tasks) and "connections" (designed to move data between DAQ modules that can be in the same or in different DAQ applications). These specific tasks can vary widely; they include [producing fake data for testing purposes](https://github.com/DUNE-DAQ/readoutmodules/blob/develop/plugins/FakeCardReader.hpp), [putting data into long term storage](https://github.com/DUNE-DAQ/dfmodules/blob/develop/plugins/DataWriterModule.hpp), and so forth. 
DAQ modules will typically execute user-defined functions when receiving standard transitions from Run Control: "conf", "start", etc. appfwk provides the `DAQModule` base class which users should derive their DAQ module class from in their own packages. Read more about ActionPlans [here](ActionPlans.md). ![daq_application](https://github.com/DUNE-DAQ/appfwk/raw/develop/docs/Application.png) @@ -25,16 +25,21 @@ In general, in a full blown DAQ system users won't be running `daq_application` ### Basics of the `DAQModule` interface + + **_Be aware that much of the boilerplate code described below can be automatically generated using the [create_dunedaq_package script](https://dune-daq-sw.readthedocs.io/en/latest/packages/daq-cmake/#the-create_dunedaq_package-script)_** + When implenting a DAQ module, you'll want to `#include` the [`DAQModule.hpp` header](https://github.com/DUNE-DAQ/appfwk/blob/develop/include/appfwk/DAQModule.hpp), and derive your DAQ module from the `DAQModule` base class. The most important parts of `DAQModule.hpp` to an implementor of a DAQ module are the following: -* `DEFINE_DUNE_DAQ_MODULE`: This is a macro which should be "called" at the bottom of your DAQ module's source file with an "argument" of the form `dunedaq::::`. E.g., `DEFINE_DUNE_DAQ_MODULE(dunedaq::dfmodules::DataWriter)` [at the bottom of the dfmodules package's DataWriter module's source file](https://github.com/DUNE-DAQ/dfmodules/blob/develop/plugins/DataWriter.cpp) +* `DEFINE_DUNE_DAQ_MODULE`: This is a macro which should be "called" at the bottom of your DAQ module's source file with an "argument" of the form `dunedaq::::`. 
E.g., `DEFINE_DUNE_DAQ_MODULE(dunedaq::dfmodules::DataWriterModule)` [at the bottom of the dfmodules package's DataWriterModule module's source file](https://github.com/DUNE-DAQ/dfmodules/blob/develop/plugins/DataWriterModule.cpp) + +* `register_command`: takes as arguments the name of a command and a function which should execute when the command is received. The function is user defined, and takes an instance of `DAQModule::data_t` as argument. `DAQModule::data_t` is aliased to the `nlohmann::json` type and can thus be thought of as a blob of JSON-structured data. While in principle any arbitary name could be associated with any function of arbitrary behavior to create a command, in practice implementors of DAQ modules define commands associated with the DAQ's state machine: "_conf_", "_start_", "_stop_", "_scrap_". Not all DAQ modules necessarily need to perform an action for each of those transitions; e.g., a module may only be designed to do something during configuration, and not change as the DAQ enters the running state ("_start_") or exits it ("_stop_"). It also supports an optional third argument which lists the states that the application must be in for the command to be valid. [!!!Control People here should make comments and see if this is correct, if it's sitll the plan, etc] -* `register_command`: takes as arguments the name of a command and a function which should execute when the command is received. The function is user defined, and takes an instance of `DAQModule::data_t` as argument. `DAQModule::data_t` is aliased to the `nlohmann::json` type and can thus be thought of as a blob of JSON-structured data. While in principle any arbitary name could be associated with any function of arbitrary behavior to create a command, in practice implementors of DAQ modules define commands associated with the DAQ's state machine: "_conf_", "_start_", "_stop_", "_scrap_". 
Not all DAQ modules necessarily need to perform an action for each of those transitions; e.g., a module may only be designed to do something during configuration, and not change as the DAQ enters the running state ("_start_") or exits it ("_stop_"). It also supports an optional third argument which lists the states that the application must be in for the command to be valid. + * **register_command must be called in the DAQModule Constructor!** -* `init`: this pure virtual function's implementation is meant to create objects which are persistent for the lifetime of the DAQ module. It takes as an argument the type `DAQModule::data_t`. Typically it will use parameters from this JSON argument to define the persistent objects. For persistent objects of types which don't have an efficient copy assigment operator, a common technique is to declare as member data a `unique_ptr` to the type of interest and then, in `init`, to allocate the desired object on the heap using values from the JSON and point the `unique_ptr` member to it. Connection objects are commonly allocated in `init`; they'll be described in more detail later in this document. +* `init`: this pure virtual function's implementation is meant to create objects which are persistent for the lifetime of the DAQ module. It also has the unique role of connecting the DAQModel with its own configuration object, see later the init section for more details. It takes as an argument the type `std::shared_ptr`. Typically, `init` will query the `ConfigurationManager`, extract the configuration object specifically defined for this `DAQModule` and will store the pointer internally to the class for later usage, when the dedicated commands comes, usually `conf`. Connection, as they are persistent objects, are commonly allocated in `init`; they'll be described in more detail later in this document. An conceptual example of what this looks like is the following simplified version of a DAQ module implementation. 
-``` +```C++ // This file would be called plugins/MyDaqModule.hpp // Functions would typically be defined in plugins/MyDaqModule.cpp @@ -49,7 +54,7 @@ class MyDaqModule : public dunedaq::appfwk::DAQModule { register_command("scrap", &MyDAQModule::do_scrap); } - void init(const data_t& init_data) override; + void init(std::shared_ptr) override; private: @@ -57,6 +62,8 @@ class MyDaqModule : public dunedaq::appfwk::DAQModule { void do_start(const data_t& start_data); void do_stop(const data_t& stop_data); void do_scrap(const data_t& scrap_data); + + const MyDAQModuleConf * m_cfg = nullptr; }; ``` @@ -78,37 +85,54 @@ A word needs to be said about the concept of a "unique name" here. Looking in [` ### The `init` function -Already touched upon above, this function takes a `data_t` instance (i.e., JSON) to tell it what objects to make persistent over the DAQ module's lifetime. A very common example of this is the construction of the `iomanager` connections which will pipe data into and out of an instance of the DAQ module. A description of this common use case will illustrate a couple of very important aspects of DAQ module programming. +Already touched upon above, this function takes a `std::shared_ptr` instance to tell it what objects to make persistent over the DAQ module's lifetime. A very common example of this is the construction of the `iomanager` connections which will pipe data into and out of an instance of the DAQ module. A description of this common use case will illustrate a couple of very important aspects of DAQ module programming. When a DAQ module writer wants to communicate with other DAQ modules, they use the [`iomanager`](https://dune-daq-sw.readthedocs.io/en/latest/packages/iomanager/#connectionid-connectionref). The `iomanager` Sender and Receiver objects needed by a DAQ Module get built in the call to `init` based on the JSON configuration `init` receives . 
A definition of `init`, then, can look like the following: -``` -void MyDaqModule::init(const data_t& init_data) { - auto ci = appfwk::connection_index(init_data, {"name_of_required_input"}); - m_required_input_ptr = dunedaq::get_iom_receiver(ci["name_of_required_input"])); +```C++ +void MyDaqModule::init(std::shared_ptr p) { + m_cfg = p->get_dal(get_name()); + if ( !m_cfg ) { + throw appfwk::CommandFailed(ERS_HERE, "init", get_name(), "Unable to retrieve configuration object"); + } + + auto inputs = m_cfg->get_inputs(); + for (auto con : inputs) { + if (con->get_data_type() == datatype_to_string ()) { + m_type1_con = con->UID(); + } + if (con->get_data_type() == datatype_to_string()) { + auto iom = iomanager::IOManager::get(); + m_type2_receiver = iom->get_receiver(con->UID()); + } + } } + ``` -In the code above, the call to `connection_index`, defined in [`DAQModuleHelper.cpp`](https://github.com/DUNE-DAQ/appfwk/blob/develop/src/DAQModuleHelper.cpp), returns a map which connects the names of connections with the `ConnectionRef` objects consumed by `IOManager`. It will throw an exception if any provided names don't appear - so in this case, if `name_of_required_input` isn't found in `init_data`, an exception will be thrown. If the name is found, then `m_required_input_ptr`, which here is an `std::shared_ptr_` to a `iomanager::Receiver` of `MyType_t`s, gets pointed to the appropriate `Receiver`. When the DAQ enters the running state, we could have `MyDaqModule` receive `MyType_t` instances from `m_required_input_ptr` for processing. +In the code above, the configuration object is first extracted and then queried for the possible input connections. +The information on the data type transmitted in the connection is used to decide what to use it for. The input of `MyType1` is simply used to store the name of the connection for later usage, while the inptu of `MyType2` is used to directly obtain the receiver socket from the `IOManager`. 
+Similar operations can be done on the outputs, for example see the [`TRBModule`](https://github.com/DUNE-DAQ/dfmodules/blob/2e9fc856e82cf566c2d38d024960a74cee910e75/plugins/TRBModule.cpp#L110). +Of course in this case operations can be more complicated because modules with multiple outputs of the same type might require a bit more logic to organise where to send data. In that case ad-hoc solutions need to be adopted based on the configuration schema object that is defined. + +This code of course raises the question: what _is_ `MyDAQModuleConf`? It's a `class`, but rather than being manually written the code for it is generated by the DUNE DAQ build system itself, using an `oks` file schema as input. Initial documentation on OKS can be found [here](https://github.com/DUNE-DAQ/dal/blob/develop/docs/README.md). It's in the schema file that the logical contents of the struct are defined; an example of this type of file can be found [here](https://github.com/DUNE-DAQ/listrev/blob/develop/schema/listrev/listrev.schema.xml). This approach allows automatic compile-time checks on the variable (here `MyDAQModuleConf`) retrieved by the module, reducing the workload on the implementor of `do_conf` or other transitions. +[!!! Here some expert should decide what to do with this comment. Should we keep discussing jsonnet?!?!?]Note also that in fact many functions in a DAQ module, including `init`, can use JSON as input to control their actions, not just `do_conf`. Further details on the generation of code from `jsonnet` files are beyond the scope of appfwk documentation and are instead covered in [this section of the daq-cmake documentation](../daq-cmake/README.md#daq_cmake_schema). ### The `do_conf` function -As one might expect, there are many values which a DAQ module may rely on to perform its calculations when in the running state that ideally should be settable during the `conf` transition. 
The typical technique is to have some member data which in the DAQ module constructor intentionally gets initialized either to zero or to implausible values (e.g. `m_calibration_scale_factor(-1)`, `m_num_total_warnings(0)`) and then to set them properly during the `config` transition. You'll see in the code below that the type of the data instance `data` which gets extracted from the JSON is `mydaqmodule::Conf`, and then `data` is used to set the member(s). -``` -void MyDaqModule::do_conf(const data_t& conf_data) +As one might expect, there are many values which a DAQ module may rely on to perform its calculations when in the running state that ideally should be settable during the `conf` transition. The typical technique is to have some member data which in the DAQ module constructor intentionally gets initialized either to zero or to implausible values (e.g. `m_calibration_scale_factor(-1)`, `m_num_total_warnings(0)`) and then to set them properly during the `config` transition. You'll see in the code below that the information is extracted from the previously set pointer to our schema generated object and is used to set the member(s). +```C++ +void MyDaqModule::do_conf(const data_t&) { - auto data = conf_data.get(); - - m_calibration_scale_factor = data.calibration_scale_factor; + m_calibration_scale_factor = m_cfg->get_calibration_scale_factor(); // ...and then set the other members which take per-configuration values... } ``` -This of course raises the question: what _is_ `mydaqmodule::Conf`? It's a `struct`, but rather than being manually written the code for it is generated by the DUNE DAQ build system itself, using a `jsonnet` file as input. It's in the `jsonnet` file that the logical contents of the struct are defined; an example of this type of file can be found [here](https://github.com/DUNE-DAQ/listrev/blob/develop/schema/listrev/randomdatalistgenerator.jsonnet). 
This approach allows automatic compile-time checks on the variable (here `mydaqmodule::Conf`) retrieved by the module, reducing the workload on the implementor of `do_conf`. Note also that in fact many functions in a DAQ module, including `init`, can use JSON as input to control their actions, not just `do_conf`. Further details on the generation of code from `jsonnet` files are beyond the scope of appfwk documentation and are instead covered in [this section of the daq-cmake documentation](../daq-cmake/README.md#daq_cmake_schema). ### The `do_start` function Most DAQ modules are designed to loop over some sort of repeated action when the DAQ enters the running state, and it's in the `do_start` function that this repeated action begins. A very common technique for the `do_start` function is, "Set an atomic boolean stating that we're now in the running state, and then start one or more threads which perform actions in loops which they break out of if they see that the atomic boolean indicates we're no longer in the running state". While it's of course possible to accomplish this using the existing concurrency facilities provided by the C++ Standard Library, the `utilities` package provides a class, `WorkerThread`, which makes this easier. 
`WorkerThread` is covered in detail [here](https://dune-daq-sw.readthedocs.io/en/latest/packages/utilities/WorkerThread-Usage-Notes/); when in use the `do_start` function can be as simple as follows: -``` +```C++ void MyDaqModule::do_start(const data_t& /*args*/) { m_thread.start_working_thread(); // m_thread is an `utilities::WorkerThread` member of MyDaqModule } @@ -118,12 +142,12 @@ Note that `start_working_thread` takes an optional argument which gives the `Wor ### The `do_stop` function Quite simple, basically the reverse of `do_start`: -``` +```C++ void MyDaqModule::do_stop(const data_t& /*args*/) { m_thread.stop_working_thread(); // m_thread is an `utilities::WorkerThread` member of MyDaqModule } ``` -Note that if your `do_start` function also allocates any resources (hardware, memory, etc.) it should be deallocated here. Also, the queues which send data to your DAQ module should be drained. The idea is that you want your DAQ module to be able to accept a "start" transition after receiving a "stop" transition without anything from the previous run interfering. +Note that if your `do_start` function also allocates any resources (hardware, memory, etc.) it should be deallocated here. Also, the input connections to your DAQ module should be drained. The idea is that you want your DAQ module to be able to accept a "start" transition after receiving a "stop" transition without anything from the previous run interfering. ### The `do_scrap` function @@ -132,10 +156,10 @@ This is the reverse of `do_config`. 
Often this function isn't even needed since ### The `get_info` function Not yet mentioned, you can see in [`DAQModule.hpp`](https://github.com/DUNE-DAQ/appfwk/blob/develop/include/appfwk/DAQModule.hpp) that there's a virtual function called `get_info` which defaults to a no-op: -``` +```C++ virtual void get_info(opmonlib::InfoCollector& /*ci*/, int /*level*/) { return; }; ``` -It's meant to be implemented by DAQ module writers to supply metrics about the DAQ module; an example of this can be found [here](https://github.com/DUNE-DAQ/dfmodules/blob/develop/plugins/DataWriter.cpp). +It's meant to be implemented by DAQ module writers to supply metrics about the DAQ module; an example of this can be found [here](https://github.com/DUNE-DAQ/dfmodules/blob/develop/plugins/DataWriterModule.cpp). ### The full code @@ -143,11 +167,12 @@ Given the code features described above, `MyDaqModule` would look something like * `MyDaqModule.hpp`: -``` +```C++ class MyDaqModule : public dunedaq::appfwk::DAQModule { public: - alias MyType_t = double; // Pretend this module processes an incoming stream of doubles + using MyType1 = double; // Pretend this module processes an incoming stream of doubles + using MyType2 = int; // Pretend this module processes an incoming stream of int MyDaqModule(const std::string& name) : // A DAQ module instance is meant to have a unique name dunedaq::appfwk::DAQModule(name), @@ -158,9 +183,9 @@ class MyDaqModule : public dunedaq::appfwk::DAQModule { register_command("start", &MyDAQModule::do_start); register_command("stop", &MyDAQModule::do_stop); register_command("scrap", &MyDAQModule::do_scrap); - } + } - void init(const data_t& init_data) override; + void init(std::shared_ptr) override; private: @@ -172,16 +197,32 @@ class MyDaqModule : public dunedaq::appfwk::DAQModule { void do_work(std::atomic&); dunedaq::utilities::WorkerThread m_thread; double m_calibration_scale_factor; - std::shared_ptr> m_required_input_ptr; + const MyDAQModuleConf * m_cfg = 
nullptr; + std::string m_type1_con; + std::shared_ptr> m_type2_receiver; }; ``` * `MyDaqModule.cpp`: -``` - -void MyDaqModule::init(const data_t& init_data) { - auto ci = appfwk::connection_index(init_data, {"name_of_required_input"}); - m_required_input_ptr = dunedaq::get_iom_receiver(ci["name_of_required_input"])); +```C++ + +void MyDaqModule::init(std::shared_ptr) { + + m_cfg = p->get_dal(get_name()); + if ( !m_cfg ) { + throw appfwk::CommandFailed(ERS_HERE, "init", get_name(), "Unable to retrieve configuration object"); + } + + auto inputs = m_cfg->get_inputs(); + for (auto con : inputs) { + if (con->get_data_type() == datatype_to_string ()) { + m_type1_con = con->UID(); + } + if (con->get_data_type() == datatype_to_string()) { + auto iom = iomanager::IOManager::get(); + m_type2_receiver = iom->get_receiver(con->UID()); + } + } } void MyDaqModule::do_conf(const data_t& conf_data) @@ -221,7 +262,7 @@ Now that you've been given an overview of appfwk and how to write DAQ modules, y ### API Diagram -![Class Diagrams](https://github.com/DUNE-DAQ/appfwk/raw/develop/docs/appfwk.png) +[!!! 
Here we need to remake this diagram]![Class Diagrams](https://github.com/DUNE-DAQ/appfwk/raw/develop/docs/appfwk.png) ----- @@ -230,9 +271,9 @@ Now that you've been given an overview of appfwk and how to write DAQ modules, y _Last git commit to the markdown source of this page:_ -_Author: eflumerf_ +_Author: Eric Flumerfelt_ -_Date: Fri Jan 20 15:06:29 2023 -0600_ +_Date: Wed Jul 23 13:39:53 2025 -0500_ _If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/appfwk/issues](https://github.com/DUNE-DAQ/appfwk/issues)_ diff --git a/docs/packages/appfwk/appfwk.drawio b/docs/packages/appfwk/appfwk.drawio index e4bbbb12a27..b8c33d09305 100755 --- a/docs/packages/appfwk/appfwk.drawio +++ b/docs/packages/appfwk/appfwk.drawio @@ -1 +1,946 @@ -7Z1Zb+O2FoB/TQBPgQwsr8ljls40uDPFTNLpRe+LoUi0zUYSFUpy4nnob79cRFnLkZfEkpiUQDGNaYmyyI+HPAsPT4ZX/vNnaofLr8RF3smg7z6fDK9PBoNBfzJg/+Mla1liDQZjWbKg2E3LNgV3+CdKC/tpaYJdFBUujAnxYhwWCx0SBMiJC2U2peSpeNmceMWnhvYCVQruHNurlv4Xu/FSlp6N+5vy3xBeLNWTrX76jW+ri9OCaGm75ClXNPz1ZHhFCYnlX/7zFfJ466l2kfd9qvk2+2EUBfE+N+Cf6+Xp5HY1JvfXj3Tin97Gj6eqmVe2l6RvfDKYeKzCy3v2x4L/cRGGHnbsGJMg+46qL1UJe+79pky8b7xWjRg9Yd+zA/bpck6C+C79ps8+2x5eBOxvh70FoqxghWjMnuZdpF/EJGSlzhJ77hd7TRL+rlFsOw/q0+WSUPyTVWt77CuLFbCvaZyiNOwXrrjjd6aPpihi13xTDWiVir7az4ULv9hRnBY4xPPsMML32Wv4Nl3g4JLEMfHTomXsqx/EX/qT7WOPj4LfkLdC/BVVa8gfao3EL6fkIQNtwK/AnndFPMKa5tpFczvxNi2c7/sUB9566DlXlLLwGREfxXTNLsnGprxjXfz4lIN8lJYtc4CPxmfp4EoH1iKrecMe+yPF7xAUrQqKp/7MT8T7XAigXP7H8EKWlSFjZXGx2WVrqtYLiAQw16BpkWLQQ/MYINDHrssfchmFtoODxRdx2fVoU3Kbtg8vIuz2uScG+pLdiAIOEInt2N7QEhIcxKL9xpfsP9bMV/2P45Mxe40r9tnafGb/8ctpfEUC9jo2Fv2NGIpPiOO4P1pVZLYLhN0gpeQM9yRHEXZ8cAYQOOxNYyxkVhEe1oqsxww9mtAz7pyeIUQPDuZk5i+ohIeEPgk8fC8JumHffbUDNvNTg5EmGE07x2gEYcRWIjEyAkhncs47J2cMkXOfROsSODZbW2KHLbJtny+J5UKc6UJZCbz4NmB1A5ZldU7WBCILMa2YGrTeNFrdr7nP4FUTW3Gzrv2JXAmYZMhQowc13a+1zyFqaBLMhMlmFmO/vFpylpQERP7Nv56JLilKKvDiKEa2u545HnEejBjTE8jOV+2q4gqQXP+TJFIHzyVRuWKpBBqQdAGp80X8ADRezhPPW88eE9a7c4zcWWBX5JvRBrUi
adD5on0AWjN94m7MUdcX378SN/GQsUJpRk+b6/L198mK3P3ncfb4+O3T95/2NfqxOrXG4MLc8d3Z3HbKsmfJmsCdhTEtrqfY1Zmx84r4vh24n1jnejhem5WUnuC1ubSvAQ9c2zskmB9AHmvhuUFNb9Q6X7Rb0wppFTo8LLq66E+3jsLDHzwg4frUqhAyrBIyBGjw7HvkfSNR6py8pvLaEiVvAIR9F91HCBiARc4ECl7Jh6v07DDctui+ki0j/cTqQvAqOSViP/TqrlA+wm3XcGkIf//ByDdtsD7reiqdVJVJxjU3rvY+SJJXhNVgiNGDmKHVPTFVpZERQ5OghwKmONr0gcfWXRzs70k/yX8NexqyN+yevWoADWMPPSMniVGPa54ua39uneetyC4MPLJkOmUgUfw7EmGlBjTNQRt3D1o1xIaBtrDjJaIi0ibqYa5oVqO1GAkecmJCS6DJVZmHVogvyVirG/I0JG/aPXnVEB2+IIuEXY0VYvc1cs44yXWiTYPlfzVsh9EWoViGE/YiSH00cks3kkYaqAWQpexykZH0wTgjtSao+8U9wE9pd1jVGWm2iJ28jy1i58rAWr9HbNqHcJw2ZfIFfePcMGaCDo8tw7bKg9dsDoORaUqCgbt7fCGwZr4dlncWshI4tDBvxt/pxsxkYo09zXg0NUIUcJ23i2g1YqOChnFnNk0B4NUGKWhqZgNtW+W1Vc+4CXXBpboyb1dowBYptRLqGZP7m0cMMES1ixhohspt8ekV7Jf9lDUDjw7wWIDxqV16QNOT4yE7SEITxqAhMYCxqV1iqgGkOT+y2kmvZrSKYgY49hzfPfwO461+BywDzup2WQbzahlf9XvnrnO1AEqixZdsqckr6vlRiJwoJ9/sMJTwMU3zTnxnxJvumHWuGkApt1zMuiZ2ljPWwzM//ctb97CYhHNbiuS0zPcW+e6Nq0yqh0/vZqJ+8yQPOldToPRfGcn2PGbTtY/oAhmMDcZb9n12jvEE2PfJAz3YvM/3tUSz+zWPVsNuEyCX40hWxQUs7EkzvjANQe5ecYIcIXmOajytrLVCRD8uw/Agn6sJO9E/7GQCxZ1MACjPGws7geKgjIv2lcJptDcYW4JJIA4aw6Cq9XCbtsw8z3eQ4sBFzz2mb5P7vw9Z0sk5l6LHBFMRxfSambQvfln/ZHp5Mr0uz8yHBriYuVrj4QDM1dBwaGyuHoA7v3LjgaJ59ILhkEKbWaaushpv0RyxdnTYeta4GnXhEFoztsshGDyT4zDhak8RQwHWnoI5TQzwMl3IxNRrBStkoW8XVsgxXhdTb4Lp361WMxxVV7NnoKo9bGw9C+cKkxm/otetHEMb1y5bUV3+Vni5Wah3ngROehJRrgZuDO0p+cwNqrO4Ipa3mgXMArdxWZ0JvledLNPu5gFoYWH0/rY5gJLbtRmbPQAjk7JJugcnjjKx2trwA2yjbFeOgNFA/2wIMqzowgqUbKJdVuqTf+1Y4Bi3sG4wQbkk2oUJtFinMbVqpd87yL5xtEAHkGL+G/OGa4OzRjhDmwpa5hm0OPM4B6W1VnJavMSDYrayaIUdlAajXezA3XZLOzqGCN3wtp9l2Wy30pHR7nUMcLseF4380Jqeg1+x24A/22w40JK77vUVyJ8xpGiBIx48+woByY9d2suweyUfj12Z1iqqE6oH2p1LPykXV8Ebc/KYEH7dxe9/bT6I783aVceB0r0uBuX5kM68ZOPJu/TtB5Tu1umFXrLA9YnpFeLA2MHsCztw0GH3VnzXB+e3Ub7J3BuZAaDFAIC2WrQ7AEaQFbTk+S4dAGOc3+/D+a2YUgZ5wPc9GQMsZhaH48NoDpE5vlTKRvhrXLwgCI1hALr28lKol1BsnHsaE1Sd1kCCmsvEBDv3CgwZXHTBBbDTtIwL6N/jdhqu3uZPrGo21YkUawdZHqFwWYO1FlgDZqCWsYY8jTXarToUcsv8+koFdds5kkZH1ZZiwEbTMsWQf1ECw/on
KFCibH68dU6f0l8nxCShPlcPL/N2wUnRqAhsZY2cJfLtWlrl84t68EYz5ntlKorw0g75n3PiuULbzavDHGrGUabVWH1Z8Jtq5mFakFMyUqhzw+KeJIGL3C/3qkDoJTfBb8gWjxRlDleGmSq1+VxWaClyEhrhFboVym9aegS9c6xm20xMVvUN9urnAGLnTTEG7NSvmEFuAhwb84eu5o/tomNvOIdFo4g69TGH5nAA2kQaIxNUhtPMUVK2bZJEmSmzgSkzkw2vMaCA1DSX5xNUfx8TlChmMPHTo0HEmu27+Mrs39QGISC8vmWEQJV4s38T5mizK9jApA9MQB79lmGCFNHLJBJqpwBmxZTCWYToCjtIknX3hGNnaRjShCHgYPl2GbKAONMkkmaJb8In/zs30amQkLLtAlLQ1O03rNFffPMXrl296E5FeHqriCHcdv1mnVd+GBRPm15d0l231V9J6TBPczrs8zggHwTw6BptOCAxGkCq69MSx+iODT9+1RPlZwMVhr4kfzCu1ZtKg74xvWE6Pd+pOFiQ4jBqarycg0m7Kp59oNOMhvsv0nAH/T1V3GljoIKB3Km/ZSNgzTqggXXA+cG5OjrXbc/BmOpEZNCsKiQ/sGsUEW3g6VyrtcDsAaVZcbNyMjPhe54JR4NxYSqc7jkTNuaFsOA9TTLiWIq3vKZjhFoDQm0jId7OlGjB+4xwmlMtr+AaZvRgRoOZEDwMLN0PfiKil7Jdade81LCjCTudm3OtIeSUrK6ijMf8X7iKGlh7LqPGjeEJbmoRviuR/VbKN5PAtlUpd743V9qsqsB9CCU8UOBeUCr6xcW2TwL3jyVfq1+yLz5h/kDRNexT2mqDUZEmKk3eJ3XhVVtHaUQS6qB9Zhcm3xZoW5XKI4TcBdraPfkoLZU+kyLPjvEqfyPcIWl13zipOZOk0qhU16uddKoK+Z7pXZturVQ07u+oSDZDpSLBR/aOr0AG2HfwS5UZ1sTSqFkgYee8lkkMMbvUDvzrA4blqG5YilBfLh/Tx5yk46x2Gjjtf5yOreJUoA5QfyUe6sgUVWupT8l8LnaRN9Gd1SAI69/Rnf2Pk9FZsTeP05kqO7TqTIVMG70JWVfeljw/31ecZxNuF/J8NCySYw0mL5Pnw1J+5kpFjcvzqmFFa3meIa61PB+WRMCoPRFQNXpoLdCP15+NCfTs3NGsO622uhMU6JUQB5Hr5gsO0A3vt5Ai3nvGKqGpVeK1aQyyk/HyeiN0mITV2MlkwKTBM9MhL5zx46SYho3cQto4cU1/bnuRsdA3Y3+olaMHmR9aPZMEmKoYRbmtyCfADmGVDyu/yS37y5ClBVmQ66ddsqDNbpfsTWOxqdDw9bb5gtxD7fIFbllLkw5me8xnxURqhra3SRu0L6BV2iZVk6uIfZgTtZ/EkPYuSMv22HaHWt3ZmnOD2vtCrfPV/6TqeWCokXiJ6IyEuQ2YRzuV2MCnC3ydKwiTAQBfiQ6Th7F5EPZdyTdlyJrUH4uazx/FXj1CPZsuHCZphG2Y5zJjn1c8mdnSpr/88kHF3Gwzxpr0T1pCWM3/1K40ssBsFmWfTdN+W+Um2uWhVbFneQ8t/rlenk5uV2Nyf/1IJ/7pbfx4qlazXThop6U4mVF5S8O+DtqzyY6KmnbQgmkqdGVDRSPt9t53iIZV7NEspOpgNKY7Kmocjaq2prWvd1rn+nqBr3dsnZ0dBQdrUuhE5bpvwVU/re7U0zr04njdd9r/OJr0i0ESw+P0ZjGSrr3QKzABjK5i2noLM3g5e385Ad3eYvpsR0XHE9P4fw9/jv/8vvhrtUDfb37Q2R/h59Pq/J0qCazw19s7/u+zg3IWj3LgxqXtXnn4R2QvUC+zsT0t7fhD9dqbQBxJdCeyeu68+keAnkM2bpGb/qTNHTy1c3pgXFYHAapIb2TLe7a6d+I/MeG9ToLis0tV7Vs5e3M2LnJvzZQfxC2P0A+xg4DE37imxjdzbe6hTNsgAfjT
N+l6r9hV/Gd/stkTcs2QUFzfyrwCvEiouPPH7c2u2wrX3zKsMGL17LorO/bmq9z2/zuJ+bYjzH7BT1TsMezucf+Fx97VXefrqO/Xa8xUxNhhYmpRbhrxvFI3plkba97dw07Mrkur/prW+/IaN6p2FCU5SrIQglJVKOJjCKjqIgzZbxPdUte6NdSpEhevCrIeMn0X76jN7HtvOw8LIelPHanny5PwxI8Cq8xwrI7hSnvK8x9FWRqbgygtfL5PonX2pjUpgFmxeOO6KLJush4BC5PK7Fi7JrHOSnrB3n6/syPYBcCZo7qkyB2Btd/ckd1QJ+Dy7pwiKYXDvAD0P6MAUdvLnlAahHViWkJaurg6IWy//VYc9EfBN9q7rh/BQ0Cegj2mvpoK1GTLh9Qet7/HQTIaAKPkDBglR0kDBo6S+uTpucTWTEmiCRsnXEExkbEaRcYeF8cJ4EGfgjhOj+BSAHGsKvWn/uzJxvHMWZUcmkxrdmVA2sqmWCjPxinwAqfADobqpcYhsbEgRsdwCYA/rxpgrSjyE/GuBZBkmWFHC3YA53a77OyxCdD4tpumAPBsgxQ0NQ2BsfWFdZA5Hk4bVqoO6HYlBhgu/4+hRUtagOOq2qUFDH7HCpaeOYdeO2Qs4BTudplRelkNNLMIeXNDjobkAAfdtkwOFP8yxPm5SShGPaEdual3JX+2o7OkJCCppoQdSiLENe9IWumkaq408fx9dkzY5cV4Y2kjLx2WVz3m1MVUZt5i1VS3wH7I7401nGvBOXBC77E4Zx8p4Q6bjVeZ2uHyK3F5VMev/wc= \ No newline at end of file + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/packages/appmodel/DFApplication.png b/docs/packages/appmodel/DFApplication.png new file mode 100644 index 00000000000..b40b409798c Binary files /dev/null and b/docs/packages/appmodel/DFApplication.png differ diff --git a/docs/packages/appmodel/README.md b/docs/packages/appmodel/README.md new file mode 100644 index 00000000000..f108611913b --- /dev/null +++ b/docs/packages/appmodel/README.md @@ -0,0 +1,89 @@ +# Appmodel + + This package extends the schema from the confmodel package +to describe readout, dataflow and trigger applications. + +## SmartDaqApplication + +![SmartDaqApplication schema class with inherited apps](apps.png) + + + **SmartDaqApplication** is an abstract class where the modules +relationship will normally be left empty with the **DaqModules** themselves +being generated on the fly by an implementation of the +`generate_modules()` method. The **SmartDaqApplication** has +relationships to **QueueConnectionRules** and + +**NetworkConnectionRules** to allow the `generate_modules()` method to +know how to connect the modules internally and to network endpoints. + +The `generate_modules` method is a pure virtual function that must be implemented for each **SmartDaqApplication**. 
It should populate the modules relationship of the **DaqApplication** and call `conffwk::update()` so that subsequent calls to `get_modules` will return the newly created objects. + +Readout, HSI, Hermes, Dataflow and Trigger applications extend from **SmartDaqApplication**. + +## ReadoutApplication + + ![ReadoutApplication schema class diagram not including classes whose + objects are generated on the fly](roApp.png) + + The **ReadoutApplication** inherits from **SmartDaqApplication** and provides +a `generate_modules()` method which will +generate a **DataReaderModule** for each **DetectorToDaqConnection** associated with the application via the `detector_connections` relationship, and a set of **DataHandlerModule** objects, i.e. **DLH** for each + +**DetectorStream** plus a single **TPHandlerModule** (FIXME: this shall become a TPHandler per detector plane). + + Optionally **DataRecorderModule** modules may be created (not supported yet). The modules are created +according to the configuration given by the data_reader, link_handler, data_recorder +and tp_handler relationships respectively. + + Connections between pairs +of modules are configured according to the `queue_rules` relationship +inherited from **SmartDaqApplication**. + +### Far Detector schema extensions + +![Class extensions for far detector](fd_customizations.png) + +Several OKS classes have far detector specific customisations, as shown in blue in the above diagram. + +## DataFlow applications + + ![DFApplication](DFApplication.png) + +The Dataflow applications, which are also **SmartDaqApplication**s which +generate **DaqModules** on the fly, are also included here. + +## Trigger applications + + ![Trigger](trigger.png) + +The Trigger applications, which are also **SmartDaqApplication**s which +generate **DaqModules** on the fly, are also included here.
+ +## WIEC application + + ![WIEC](wiec_app.png) + +The WIEC application is a **SmartDaqApplication** which generates **HermesModule** modules, and **WIBModules**, on the fly. + +## Testing SmartDaqApplication module generation + +This package also provides a program `generate_modules_test` for +testing the `generate_modules` method of **SmartDaqApplication**s. It reads +a configuration from an OKS database, generates the DaqModules for the +requested SmartDaqApplication and prints a summary of the DaqModules +and Connections. + + +----- + + +_Last git commit to the markdown source of this page:_ + + +_Author: Gordon Crone_ + +_Date: Thu Sep 4 16:44:30 2025 +0100_ + +_If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/appmodel/issues](https://github.com/DUNE-DAQ/appmodel/issues)_ + diff --git a/docs/packages/appmodel/SmartDaqApplication.md b/docs/packages/appmodel/SmartDaqApplication.md new file mode 100644 index 00000000000..9be431c5be2 --- /dev/null +++ b/docs/packages/appmodel/SmartDaqApplication.md @@ -0,0 +1,100 @@ +# SmartDaqApplication + +The SmartDaqApplication class allows for automatic creation of modules and connections for a known application. The general pattern is that a SDA ingests a set of module configuration objects and connection rules and uses them to create a well-defined application with modules, internal connections, and external connections. + +## Writing a new SmartDaqApplication + +SmartDaqApplications implement the `std::vector generate_modules(const confmodel::Session*)` method, which is responsible for generating a set of modules and connection objects. Each SmartDaqApplication has a UID from the configuration. + +This section will use the "[DFOApplication](https://github.com/DUNE-DAQ/appmodel/blob/develop/src/DFOApplication.cpp)" SmartDaqApplication as an example.
+ + +## ConfigObjectFactory +`ConfigObjectFactory` is a helper class to simplify the creation of `appfwk` configuration objects in `SmartApplication`. +Once instantiated at the start of `generate_modules`, it offers a set of methods to facilitate the creation of configuration objects, queues and network connections. + + +### Creating a module + +```C++ + + ConfigObjectFactory obj_fac(this); + + TLOG_DEBUG(7) << "creating OKS configuration object for DFOModule class "; + conffwk::ConfigObject dfoObj = obj_fac.create("DFOModule", "DFO-"+UID()); + + auto dfoConf = get_dfo(); + dfoObj.set_obj("configuration", &dfoConf->config_object()); +``` + +Here, it is important to understand the DFOApplication schema definition: +```XML + + + + + + + +``` +In addition to the fields from SmartDaqApplication, the DFOApplication class has a relationship named "dfo" to a `DFOConf` object. As an OKS object, it also has a "UID" field. The code uses this UID (accessed via the `UID()` method) to create the UID for a `DFOModule` object. The object is created in the in-memory database, and its configuration assigned using the "dfo" relationship from the DFOApplication schema. + +### Reading connection rules and creating connections + +```C++ + + for (auto rule : get_network_rules()) { + auto endpoint_class = rule->get_endpoint_class(); + auto descriptor = rule->get_descriptor(); + + conffwk::ConfigObject connObj = obj_fac.create_net_obj(descriptor); + + if (descriptor->get_data_type() == "TriggerDecision") { + tdInObj = connObj; + input_conns.push_back(&tdInObj); + } + else if (descriptor->get_data_type() == "TriggerDecisionToken") { + tokenInObj = connObj; + input_conns.push_back(&tokenInObj); + } + + else if (descriptor->get_data_type() == "TriggerInhibit") { + busyOutObj = connObj; + output_conns.push_back(&busyOutObj); + } + } + +``` + +The next stage of DFOApplication is to retrieve the network connection rules to assign the inputs and outputs of the `DFOModule` instance.
A DFO has two fixed inputs (decisions and tokens), and one fixed output (inhibits). Decisions sent to TRB instances are dynamically instantiated at run-time using information in the token messages. + +### Setting Module Connection relationships + +```C++ + dfoObj.set_objs("inputs", input_conns); + dfoObj.set_objs("outputs", output_conns); + + // Add to our list of modules to return + modules.push_back(confdb->get(dfoUid)); + + return modules; +``` + +Once the fixed connections are retrieved using the network rules, the module's input and output relations are set, and the module is added to the output vector, which is returned. + +### Summary + +These basic steps are repeated in all SmartDaqApplication instances, with differences depending on the specific application being implemented. The DFOApplication is one of the simplest applications in the system, but it demonstrates the basic logic followed by all SmartDaqApplications. + +----- + + +_Last git commit to the markdown source of this page:_ + + +_Author: Alessandro Thea_ + +_Date: Tue May 20 23:32:21 2025 +0200_ + +_If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/appmodel/issues](https://github.com/DUNE-DAQ/appmodel/issues)_ + diff --git a/docs/packages/appmodel/apps.png b/docs/packages/appmodel/apps.png new file mode 100644 index 00000000000..0df9d8b182d Binary files /dev/null and b/docs/packages/appmodel/apps.png differ diff --git a/docs/packages/appmodel/fd_customizations.png b/docs/packages/appmodel/fd_customizations.png new file mode 100644 index 00000000000..91281306311 Binary files /dev/null and b/docs/packages/appmodel/fd_customizations.png differ diff --git a/docs/packages/appmodel/readout.png b/docs/packages/appmodel/readout.png new file mode 100644 index 00000000000..6fc3cfd120e Binary files /dev/null and b/docs/packages/appmodel/readout.png differ diff --git a/docs/packages/appmodel/roApp.png b/docs/packages/appmodel/roApp.png new file 
mode 100644 index 00000000000..bf55737c538 Binary files /dev/null and b/docs/packages/appmodel/roApp.png differ diff --git a/docs/packages/appmodel/trigger.png b/docs/packages/appmodel/trigger.png new file mode 100644 index 00000000000..9d5dac75b65 Binary files /dev/null and b/docs/packages/appmodel/trigger.png differ diff --git a/docs/packages/appmodel/wiec_app.png b/docs/packages/appmodel/wiec_app.png new file mode 100644 index 00000000000..2fc67f6d1af Binary files /dev/null and b/docs/packages/appmodel/wiec_app.png differ diff --git a/docs/packages/asiolibs/README.md b/docs/packages/asiolibs/README.md new file mode 100644 index 00000000000..fe4b8451733 --- /dev/null +++ b/docs/packages/asiolibs/README.md @@ -0,0 +1,48 @@ +# Asiolibs + +Boost.Asio-based socket reader plugin for low-bandwidth devices + +# Example usage + +`local-crt-bern1x1-config` and `local-crt-grenoble-1x1-config` (defined in `daqsystemtest/config/daqsystemtest/example-configs.data.xml`) are session configurations with a CRT reader application accompanied by a socket reader application. + +CRT reader application includes a data reader (either `CRTBernReaderModule` or `CRTGrenobleReaderModule`) which reads data from the hardware then puts it into a queue and data writers (`SocketWriterModule`) which read data from the queue then send it over a socket. + +Socket reader application includes a data reader (`SocketReaderModule`) which reads data from the socket (`CRTBernFrame`/`CRTGrenobleFrame`) then puts it into another queue to be processed by `DataHandlingModel`. 
+ +![crt-reader-and-readout-apps](crt-reader-and-readout-apps.png) + +![local-crt-bern-1x1-config](local-crt-bern-1x1-config.svg) + +![local-crt-grenoble-1x1-config](local-crt-grenoble-1x1-config.svg) + +## How to run + +``` +drunc-unified-shell ssh-standalone config/daqsystemtest/example-configs.data.xml local-crt-bern-1x1-config uname-local-test + +drunc-unified-shell ssh-standalone config/daqsystemtest/example-configs.data.xml local-crt-grenoble-1x1-config uname-local-test +``` + +The following table includes relevant configuration details that can be set by the user. Users can either configure TCP or UDP as the socket type. + +| Configuration | Can be changed from | Object ID/Attribute name | +| ---------------- | ------------------- | ---------------- | +| Local IP | config/daqsystemtest/moduleconfs.data.xml | def-socket-reader-conf/local_ip +| Remote IP | config/daqsystemtest/moduleconfs.data.xml | def-socket-writer-conf/remote_ip +| Port | config/daqsystemtest/ru-segment.data.xml | socket_wib_101_link0/port | +| Socket type | config/daqsystemtest/moduleconfs.data.xml | def-socket-reader-conf/socket_type
def-socket-writer-conf/socket_type | + + +----- + + +_Last git commit to the markdown source of this page:_ + + +_Author: Deniz Tuana Ergonul Uzun_ + +_Date: Wed Jun 11 11:51:26 2025 +0200_ + +_If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/asiolibs/issues](https://github.com/DUNE-DAQ/asiolibs/issues)_ + diff --git a/docs/packages/asiolibs/crt-reader-and-readout-apps.png b/docs/packages/asiolibs/crt-reader-and-readout-apps.png new file mode 100644 index 00000000000..405a0f4ed40 Binary files /dev/null and b/docs/packages/asiolibs/crt-reader-and-readout-apps.png differ diff --git a/docs/packages/asiolibs/local-crt-bern-1x1-config.svg b/docs/packages/asiolibs/local-crt-bern-1x1-config.svg new file mode 100644 index 00000000000..277afa7a654 --- /dev/null +++ b/docs/packages/asiolibs/local-crt-bern-1x1-config.svg @@ -0,0 +1,279 @@ + + + + + + +G + + + +0 + +crt-bern-root-segment +Segment + + + +4 + +hsi-fake-segment +Segment + + + +0->4 + + + + +5 + +trg-segment +Segment + + + +0->5 + + + + +6 + +df-segment +Segment + + + +0->6 + + + + +9 + +crt-bern-segment +Segment + + + +0->9 + + + + +1 + +local-crt-bern-1x1-config +Session + + + +1->0 + + + + +2 + +hsi-fake-to-tc-app +HSIEventToTCApplication + + + +10 + +mlt +MLTApplication + + + +2->10 + + +tcs_.* + + + +3 + +hsi-fake-01 +FakeHSIApplication + + + +3->2 + + +hsi_event + + + +7 + +df-01 +DFApplication + + + +3->7 + + +fragments_df-01 + + + +4->2 + + + + +4->3 + + + + +5->10 + + + + +6->7 + + + + +8 + +dfo-01 +DFOApplication + + + +6->8 + + + + +7->3 + + +data_requests_for_hsi-fake-01 + + + +7->8 + + +dataflow_token + + + +7->10 + + +data_requests_for_mlt + + + +11 + +crt-bern-ru-01 +ReadoutApplication + + + +7->11 + + +data_requests_for_crt-bern-ru-01 + + + +8->7 + + +trigger_decision_df-01 + + + +8->10 + + +trigger_inhibit + + + +9->11 + + + + +12 + +crt-bern-01 +CRTReaderApplication + + + +9->12 + + + + +10->7 + + +fragments_df-01 + + + +10->8 + + 
+td_mlt_dfo + + + +10->10 + + +tcs_.* + + + +11->7 + + +fragments_df-01 + + + +11->10 + + +time_sync_.* + + + +legendA +black: session + + + +legendB +brown: segment + + + + +legendC +blue: application + + + + diff --git a/docs/packages/asiolibs/local-crt-grenoble-1x1-config.svg b/docs/packages/asiolibs/local-crt-grenoble-1x1-config.svg new file mode 100644 index 00000000000..d3c039226b1 --- /dev/null +++ b/docs/packages/asiolibs/local-crt-grenoble-1x1-config.svg @@ -0,0 +1,279 @@ + + + + + + +G + + + +0 + +hsi-fake-segment +Segment + + + +7 + +hsi-fake-to-tc-app +HSIEventToTCApplication + + + +0->7 + + + + +8 + +hsi-fake-01 +FakeHSIApplication + + + +0->8 + + + + +1 + +trg-segment +Segment + + + +2 + +mlt +MLTApplication + + + +1->2 + + + + +2->2 + + +tcs_.* + + + +6 + +dfo-01 +DFOApplication + + + +2->6 + + +td_mlt_dfo + + + +10 + +df-01 +DFApplication + + + +2->10 + + +fragments_df-01 + + + +3 + +local-crt-grenoble-1x1-config +Session + + + +5 + +crt-grenoble-root-segment +Segment + + + +3->5 + + + + +4 + +df-segment +Segment + + + +4->6 + + + + +4->10 + + + + +5->0 + + + + +5->1 + + + + +5->4 + + + + +9 + +crt-grenoble-segment +Segment + + + +5->9 + + + + +6->2 + + +trigger_inhibit + + + +6->10 + + +trigger_decision_df-01 + + + +7->2 + + +tcs_.* + + + +8->7 + + +hsi_event + + + +8->10 + + +fragments_df-01 + + + +11 + +crt-grenoble-ru-01 +ReadoutApplication + + + +9->11 + + + + +12 + +crt-grenoble-01 +CRTReaderApplication + + + +9->12 + + + + +10->2 + + +data_requests_for_mlt + + + +10->6 + + +dataflow_token + + + +10->8 + + +data_requests_for_hsi-fake-01 + + + +10->11 + + +data_requests_for_crt-grenoble-ru-01 + + + +11->2 + + +time_sync_.* + + + +11->10 + + +fragments_df-01 + + + +legendA +black: session + + + +legendB +brown: segment + + + + +legendC +blue: application + + + + diff --git a/docs/packages/asiolibs/local-socket-1x1-config.svg b/docs/packages/asiolibs/local-socket-1x1-config.svg new file mode 100644 index 00000000000..beb602235aa --- /dev/null +++ 
b/docs/packages/asiolibs/local-socket-1x1-config.svg @@ -0,0 +1,279 @@ + + + + + + +G + + + +0 + +local-socket-1x1-config +Session + + + +3 + +socket-root-segment +Segment + + + +0->3 + + + + +1 + +hsi-fake-to-tc-app +HSIEventToTCApplication + + + +4 + +mlt +MLTApplication + + + +1->4 + + +tcs_.* + + + +2 + +hsi-fake-01 +FakeHSIApplication + + + +2->1 + + +hsi_event + + + +6 + +df-01 +DFApplication + + + +2->6 + + +fragments_df-01 + + + +5 + +df-segment +Segment + + + +3->5 + + + + +8 + +hsi-fake-segment +Segment + + + +3->8 + + + + +9 + +trg-segment +Segment + + + +3->9 + + + + +10 + +socket-ru-segment +Segment + + + +3->10 + + + + +4->4 + + +tcs_.* + + + +4->6 + + +fragments_df-01 + + + +7 + +dfo-01 +DFOApplication + + + +4->7 + + +td_mlt_dfo + + + +5->6 + + + + +5->7 + + + + +6->2 + + +data_requests_for_hsi-fake-01 + + + +6->4 + + +data_requests_for_mlt + + + +6->7 + + +dataflow_token + + + +11 + +socket-ru-01 +ReadoutApplication + + + +6->11 + + +data_requests_for_socket-ru-01 + + + +7->4 + + +trigger_inhibit + + + +7->6 + + +trigger_decision_df-01 + + + +8->1 + + + + +8->2 + + + + +9->4 + + + + +10->11 + + + + +12 + +ss-01 +SocketSenderApplication + + + +10->12 + + + + +11->4 + + +time_sync_.* + + + +11->6 + + +fragments_df-01 + + + +legendA +black: session + + + +legendB +brown: segment + + + + +legendC +blue: application + + + + diff --git a/docs/packages/confmodel/README.md b/docs/packages/confmodel/README.md new file mode 100644 index 00000000000..84b3ca9f223 --- /dev/null +++ b/docs/packages/confmodel/README.md @@ -0,0 +1,148 @@ +# confmodel +This package contains the core' schema for the DUNE daq OKS configuration. + + ![schema](schema.png) + +The top level of the schema is the **Session** which defines some global +DAQ parameters and has a relationship to a single top-level **Segment**. +It also has a list of disabled [Resources](#resources-and-resourcesets). 
It is intended that parts of +the DAQ system that are not required in the current run are simply +disabled rather than deleted from the database altogether. + +A **Segment** is a logical grouping of applications which +are controlled by a single controller (**RCApplication**). A **Segment** may contain other +nested **Segment**s. A **Segment** is a Resource that can be enabled/disabled [(see below)](#resources-and-resourcesets), +disabling a **Segment** disables all of its nested **Segment**s. + +The **Application** class has attributes defining the application's + `application_name` (executable name) and `commandline_parameters`. Its + `application_environment` relationship lists environment variables needed by the + application in addition to those defined by the **Session**. + +## Resources and ResourceSets + + +**Resource** is an abstract class describing an item that can be +disabled directly. It has the method `is_disabled(const dunedaq::confmodel::ResourceTree& session)` which can be called +by application code to determine if the object should be considered +disabled for this session (Session is a subclass of ResourceTree). The [disabling logic](#the-resource-disabled-logic) calls the virtual +`compute_disabled_state(const std::set& disabled_resources)` method to determine the state of the Resource, the disabled_resources argument is a list of UIDs of all the Resources that have been disabled so far. The +implementation provided by the base class just checks that the object +itself is not in the list of disabled objects. Derived classes can +re-implement this method with whatever logic is needed to determine the +state of the object, for example the **ResourceSetDisableAND** class +provides an implementation that ANDs together the state of all of its +contained objects. + + +**ResourceSet** is an abstract container of **Resource**s which can be disabled together. It +is itself a Resource (so can be nested). 
It defines a pure virtual method `contained_resources()` which returns a vector of pointers to 'contained' resources. Developers should implement this method to extract any resources that need to be considered for determining the disabled state of the set from among the class's relationships. The class may have relationships to other Resource derived +objects that will be ignored for the disabled check. + + +**ResourceSetDisableAND** is a container of **Resource**s which will +be disabled if *all* of its **Resource**s are disabled. It provides a +final implementation of the ResourceSet::compute_disabled_state() method. + + +**ResourceSetDisableOR** is a container of **Resource**s which +provides a final implementation of the ResourceSet::compute_disabled_state() +method returning true if *any* of its contained **Resource**s are +disabled. + + +**Segment** is a container of **Segment**s and **Applications** +which inherits from **ResourceSetDisableAND** so it can be disabled +directly or indirectly if all its components are disabled. + + ![Resource tree](resourcetree.png) + +### The Resource disabled logic + +The Resource disabled logic works on a single tree of **ResourceSets**. +It is held by the virtual class **ResourceTree** currently **Session** +is the only concrete class derived from it. +The **ResourceTree** holds a **DisabledResources** object which is initialised with a reference to the root **Segment** +and the list of disabled resources from its `disabled` relationship. + +⚠️**Any ResourceSet that is not referenced by a ResourceSet in the tree +starting at the Session's segment relationship will not be considered +by the disabling logic!** + +The **DisabledResources** constructor will configure itself using the +tree of Resources and initial list of disabled Resources. +To start with, the UID of each member of the list is inserted into a +set and any 'contained' (using the `contained_resources()` method) Resources +are also disabled. 
+ +A list of all ResourceSets in the tree is generated by recursively +calling `contained_resources()` and iterating over all the ResourceSets. +Then it iterates over the list of **ResourceSet**s. If a ResourceSet +is not currently in the disabled set, it will call the `compute_disabled_state()` +method to see if its state has been changed by the current content of +the disabled set. It will repeat this procedure until an iteration +that ends with the same number of disabled resources it started with. + + +## Readout Map + + ![ReadoutMap schema](ReadoutMap.png) + +(the blue classes in the diagram are not part of confmodel and are +there to show how the other parts fit together) + +The readout map is defined in terms of **DetectorStream** objects +which define a one-to-one mapping between a source_id and a + + **GeoID** object. A collection of streams is +aggregated into a **DetDataSender** and a group of **DetDataSender** +objects are contained in a **DetectorToDaqConnection** along with a +single **DetDataReceiver**. + +### Resource handling in the readout map + +The **DetectorToDaqConnection** is a **ResourceSet** with a custom implementation of `compute_disabled_state()` that checks that the **DetDataReceiver** and at least one **DetDataSender** are enabled. + +The **DetDataSender** is a **ResourceSetDisableAND** that contains a set of **DetectorStream** **Resource**s. + + + +## Finite State Machines +Each controller (**RCApplication**) uses one **FSMConfiguration** object that describes actions, transitions and sequences. + + ![FSM schema](fsm.png) + +## Notes + +### VirtualHost + + The idea is that this describes the subset of resources of a physical +host server that are available to an Application. For example, two +applications may be assigned to the same physical server but each be +allocated resources of a different NUMA node. 
+ +### **DaqApplication** and **DaqModule** + + The **DaqApplication** contains a list of **DaqModule**s each of which has a +list of used resources. The **DaqApplication** provides a method +`get_used_hostresources` which can be called by `appfwk` in order to check +that these resources are indeed associated with the VirtualHost by +comparing with those listed in its `hw_resources` relationship. + +### NetworkConnection + Describes the connection type and points to the **Service** running over this connection. + + + +----- + + +_Last git commit to the markdown source of this page:_ + + +_Author: Gordon Crone_ + +_Date: Mon Oct 27 15:46:35 2025 +0000_ + +_If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/confmodel/issues](https://github.com/DUNE-DAQ/confmodel/issues)_ + diff --git a/docs/packages/confmodel/ReadoutMap.png b/docs/packages/confmodel/ReadoutMap.png new file mode 100644 index 00000000000..36aaa4b3d0a Binary files /dev/null and b/docs/packages/confmodel/ReadoutMap.png differ diff --git a/docs/packages/confmodel/environment.png b/docs/packages/confmodel/environment.png new file mode 100644 index 00000000000..7862878448d Binary files /dev/null and b/docs/packages/confmodel/environment.png differ diff --git a/docs/packages/confmodel/fsm.png b/docs/packages/confmodel/fsm.png new file mode 100644 index 00000000000..2c7152df115 Binary files /dev/null and b/docs/packages/confmodel/fsm.png differ diff --git a/docs/packages/confmodel/resources.png b/docs/packages/confmodel/resources.png new file mode 100644 index 00000000000..22f6baf3328 Binary files /dev/null and b/docs/packages/confmodel/resources.png differ diff --git a/docs/packages/confmodel/resourcetree.png b/docs/packages/confmodel/resourcetree.png new file mode 100644 index 00000000000..98bfa35fd83 Binary files /dev/null and b/docs/packages/confmodel/resourcetree.png differ diff --git a/docs/packages/confmodel/schema.png 
b/docs/packages/confmodel/schema.png new file mode 100644 index 00000000000..3f54e25483e Binary files /dev/null and b/docs/packages/confmodel/schema.png differ diff --git a/docs/packages/confmodel/schema.view b/docs/packages/confmodel/schema.view new file mode 100644 index 00000000000..663ca696f5a --- /dev/null +++ b/docs/packages/confmodel/schema.view @@ -0,0 +1,30 @@ +DetDataSender,429,221 +DetDataReceiver,856,113 +ResourceTree,1011,13 +Session,948,294 +DetectorConfig,888,599 +VariableBase,391,307 +VariableSet,202,228 +Variable,420,377 +Resource,617,14 +ResourceSet,670,150 +ResourceSetDisableAND,663,226 +Segment,649,452 +Application,135,375 +RCApplication,527,616 +DaqApplication,134,674 +DaqModule,358,817 +Connection,125,814 +NetworkConnection,2,947 +Queue,244,946 +NetworkInterface,961,777 +NetworkDevice,950,946 +HostComponent,777,831 +VirtualHost,403,717 +PhysicalHost,794,725 +ProcessingResource,605,949 +StorageDevice,780,942 +ConnectionService,107,560 +Service,28,227 +DetectorStream,475,127 +DetectorToDaqConnection,877,175 diff --git a/docs/packages/connectivityserver/README.md b/docs/packages/connectivityserver/README.md new file mode 100644 index 00000000000..a6bc6eaf3cc --- /dev/null +++ b/docs/packages/connectivityserver/README.md @@ -0,0 +1,87 @@ +# connectivityserver + + This service provides a very simple flask based +server to serve connection information to DAQ applications. + + +## REST interface + + The server reponds to the following uris + +### /publish + Allows publication of connection information. The content of the + request should be JSON encoded. For example, the following json file + can be published using curl. 
+ +``` +> cat publish.json +{ + "connections":[ + { + "connection_type":0, + "data_type":"TPSet", + "uid":"DRO-000-tp_to_trigger", + "uri":"tcp://192.168.1.100:1234" + }, + { + "connection_type":0, + "data_type":"TPSet", + "uid":"DRO-001-tp_to_trigger", + "uri":"tcp://192.168.1.100:1235" + } + ], + "partition":"ccTest" +} + +> curl -d @publish.json -H "content-type: application/json" \ + http://connection-flask.connections:5000/publish +``` + +### /getconnection/ +This uri returns a list of connections matching the 'uid_regex' and +'data_type' specified in the JSON encoded request. + +``` +curl -d '{"uid_regex":"DRO.*","data_type":"TPSet"}' \ + -H "content-type: application/json" \ + http://connection-flask.connections:5000/getconnection/ccTest +[{"uid": "DRO-000-tp_to_trigger", "uri": "tcp://192.168.1.100:1234", "connection_type": 0, "data_type": "TPSet"}, {"uid": "DRO-001-tp_to_trigger", "uri": "tcp://192.168.1.100:1235", "connection_type": 0, "data_type": "TPSet"}] +``` + + +### /retract +This uri should be used to remove published connections. The request should be JSON encoded with the keys "partition" and "connections" with the latter being an array of "connection_id" and "data_type" values. + + +### /retract-partition +This uri should be used to remove all published connections from the +given partition. The request should be JSON encoded with one field "partition" naming the partition to be retracted. + +## Running the server locally from the command line + The server is intended to be run under the Gunicorn web server. + + ``` + gunicorn -b 0.0.0.0:5000 --workers=1 --worker-class=gthread --threads=2 \ + --timeout 5000000000 connectivityserver.connectionflask:app + ``` + +Some debug information will be printed by the connection-flask if the +environment variable 'CONNECTION_FLASK_DEBUG' is set to a number +greater than 0. Currently 1 will print timing information for the +publish/lookup calls. 
2 will give information about what was +published/looked up and 3 is even more verbose printing the actual +JSON of the requests. + + +----- + + +_Last git commit to the markdown source of this page:_ + + +_Author: Gordon Crone_ + +_Date: Thu Oct 16 16:54:02 2025 +0100_ + +_If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/connectivityserver/issues](https://github.com/DUNE-DAQ/connectivityserver/issues)_ + diff --git a/docs/packages/connectivityserver/deploy/README.md b/docs/packages/connectivityserver/deploy/README.md new file mode 100644 index 00000000000..eb7b29f4640 --- /dev/null +++ b/docs/packages/connectivityserver/deploy/README.md @@ -0,0 +1,50 @@ +# connectivityserver + + This service provides a very simple flask based +server to serve connection information to DAQ applications. + +## Installation + +To build the docker image of develop just do +```bash +docker buildx build --tag ghcr.io/dune-daq/connectivityserver:latest . +``` +Or, if you want to specify a tag +```bash +docker buildx build --tag ghcr.io/dune-daq/connectivityserver:v1.3.0 --build-arg VERSION=v1.3.0 . +``` + + Apply the kubernetes manifest from connectivityserver.yaml. This + should start a service called connectionservice in the namespace + connections. + +``` +kubectl apply -f connectivityserver.yaml +``` + +To test the basic operation of the server, you can connect to pod in the k8s cluster and try getting the root document. + +``` +> kubectl exec myPod -i -t -- bash +[root@myPod /]# curl http://connectionservice.connections:5000 +

Dump of configuration dictionary

Active partitions

None


Server statistics

Since 2023-03-16 09:15:06.571492

0 calls to publish in total time 0:00:00 (average 0 µs per call)

0 calls to lookup in total time 0:00:00 (average 0 µs per call)

Maximum number of partitions active = 0

+[root@myPod /]# +``` + +## Connectivityserver operation +Please refer to the documentation in the +connectivityserver package [https://github.com/DUNE-DAQ/connectivityserver]. + + +----- + + +_Last git commit to the markdown source of this page:_ + + +_Author: Gordon Crone_ + +_Date: Thu Oct 16 16:54:02 2025 +0100_ + +_If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/connectivityserver/issues](https://github.com/DUNE-DAQ/connectivityserver/issues)_ + diff --git a/docs/packages/ctbmodules/README.md b/docs/packages/ctbmodules/README.md new file mode 100644 index 00000000000..41272359f41 --- /dev/null +++ b/docs/packages/ctbmodules/README.md @@ -0,0 +1,74 @@ +# ctbmodules - DUNE DAQ module to control and read out the CTB hardware + +Ported from original implementation in redmine: + + + + + + + + +## Instructions to update the configuration and run with dunedaq v5 line + +### Area setup +First of all you need a v5 area. +To do this follow the instructions in the daqconf wiki, for example [fddaq-v5.3.2](https://github.com/DUNE-DAQ/daqconf/wiki/Setting-up-a-fddaq%E2%80%90v5.3.2-software-area). + +Locally, in the top area, you also need the [base configuration repository](https://gitlab.cern.ch/dune-daq/online/ehn1-daqconfigs). +Please note that the repo on gitlab is only accessible via ssh key, so please register one in the CERN gitlab. +I recommend you also set in your area a `.netrc` file as in the `np04daq` home; remember to change login to your CERN username. +After that you can simply +```bash +git clone ssh://git@gitlab.cern.ch:7999/dune-daq/online/ehn1-daqconfigs.git +``` +Or alternatively +```bash +cpm-setup -b fddaq-v5.3.2 ehn1-daqconfigs +``` +The first one is a direct clone, while the second sets up the configuration repo to do some more advanced operations, so the default branches might be a little strange. 
+Further documentation on the various `cpm-*` commands can be found in [the runconftools documentation](https://github.com/DUNE-DAQ/runconftools/blob/develop/docs/README.md). +The second only works if you have set up the `.netrc` file correctly. +To conclude, just +```bash +source ehn1-daqconfigs/setup_db_path.sh +``` + +### Update the CTB configuration +The ehn1-daqconfigs already contains a valid configuration for the CTB. +Due to the implementation of HLTs and LLTs as confmodel::resource, it's best if any branch of ehn1-daqconfigs contains only one version of each. +So, as CTB experts, the only thing you should do is to update the value of the objects already created in ehn1-daqconfigs. + +In order to do so, there is a script called `update_ctb_settings`. +Typical usage is: +```bash +update_ctb_settings ehn1-daqconfigs/sessions/np02-session.data.xml +``` +This will do the following: + - It will change the value of every object in the configuration related to the CTB according to the json file you provide + - It will enable/disable HLTs and LLTs according to your configuration +Please keep in mind that HLTs can also be enabled/disabled via the shifter interface (see dedicated section). + +Once you are happy with the changes, you can commit and push the changes on a branch on ehn1-daqconfigs and open a Merge request toward the dedicated branch. + +### Run the CTB configuration +In order to run, start using the shifter interface in local mode: +```bash +runconf-shifter-ui -l -d ehn1-daqconfigs +``` +From the interface, select which components you need, select which HLTs you want to enable and click `create`. +The output of the shifter interface will tell you how to run. 
+ + +----- + + +_Last git commit to the markdown source of this page:_ + + +_Author: Marco Roda_ + +_Date: Wed Jun 18 11:19:38 2025 +0200_ + +_If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/ctbmodules/issues](https://github.com/DUNE-DAQ/ctbmodules/issues)_ + diff --git a/docs/packages/daq-assettools/README.md b/docs/packages/daq-assettools/README.md index 31df43f8c15..d9429d3826d 100644 --- a/docs/packages/daq-assettools/README.md +++ b/docs/packages/daq-assettools/README.md @@ -1,70 +1,37 @@ # DAQ Asset Tools +## Overview + DAQ asset files are stored under a 3-level hashed directory in `/cvmfs/dunedaq.opensciencegrid.org/assets/files`. Each asset file has an associated json file with its metadata under the same directory. There is a SQLite database file (`dunedaq-asset-db.sqlite`) under `/cvmfs/dunedaq.opensciencegrid.org/assets`. Metadata of the files are also stored in this database file. -This repository contains a set of tools to manage these DAQ asset files. +This repository contains a set of tools to manage these DAQ asset files, available [once the standard DUNE DAQ environment has been set up](https://dune-daq-sw.readthedocs.io/en/latest/packages/daq-buildtools/). -- `assets-list`: list asset files; -- `assets-add`: adding new asset files to the catalog; -- `assets-update`: update asset files' metadata; -- `assets-retire`: retire asset files. +- `assets-list`: list asset files +- `assets-add`: adding new asset files to the catalog +- `assets-update`: update asset files' metadata +- `assets-retire`: retire asset files -Files listed in this [spreadsheet](https://docs.google.com/spreadsheets/d/1oDYe1eEqJhkY0DTd6mfpLw9ou7TqBCaDEgTo0qqVmqY/edit#gid=0) are being cataloged. When adding new files, please add new entries to the spreadsheet and let Software Coordination team to catalog and publish the files. 
+Each command has a `-h` option which will tell you how to use it in detail; some of the highlights are covered in this document. -### Installation +Files which are part of our assets are catalogued in this [spreadsheet](https://docs.google.com/spreadsheets/d/1oDYe1eEqJhkY0DTd6mfpLw9ou7TqBCaDEgTo0qqVmqY/edit#gid=0), where they provide info to users about each asset. When developers and testers want a new asset, they should open an issue in this repository and select the "Request to add a DAQ asset file" form. The Software Coordination team will then publish the file to `cvmfs`. -`pip install git+https://github.com/DUNE-DAQ/daq-assettools@v1.0.0#egg=daq-assettools` +Note that asset files shouldn't exceed more than a couple hundred MB in size; cvmfs responds badly to files larger than that. -## How to get path to asset files +## How to see which asset files are available -`assets-list` is the tool for getting the path to asset files. +`assets-list` is the tool for this. It's a flexible tool; see `assets-list -h` for all available options. Here are some examples: -Examples: - `assets-list --subsystem readout` -- `assets-list --subsystem readout --copy-to ./`: list files of `readout` subsystem, and copy them to the current directory. The copied file will be renamed as `file-.ext`, assuming its original file name is `file.ext`; -- `assets-list -c dc74fe934cfb603d74ab6e54a0af7980`: list single file matching the MD5 file checksum; -- `assets-list -c dc74fe934cfb603d74ab6e54a0af7980 --copy-to ./`: list single file matching the MD5 file checksum and copy the file to the current directory; -- `assets-list -c dc74fe934cfb603d74ab6e54a0af7980 | awk '{print $NF}'`: get the file path only; +- `assets-list --subsystem readout --copy-to ./`: list files of `readout` subsystem, and copy them to the current directory. 
The copied file will be renamed as `file-.ext`, assuming its original file name is `file.ext` +- `assets-list -c dc74fe934cfb603d74ab6e54a0af7980`: list single file matching the MD5 file checksum +- `assets-list -c dc74fe934cfb603d74ab6e54a0af7980 --copy-to ./`: list single file matching the MD5 file checksum and copy the file to the current directory - `assets-list --subsystem readout --format binary --status valid --print-metadata` -``` -usage: assets-list [-h] [--db-file DB_FILE] [-n NAME] - [--subsystem {readout,trigger}] [-l LABEL] - [-f {binary,text}] - [--status {valid,expired,new_version_available}] - [--description DESCRIPTION] [--replica-uri REPLICA_URI] - [-p] [--copy-to COPY_TO] - -optional arguments: - -h, --help show this help message and exit - --db-file DB_FILE path to database file (default: - /cvmfs/dunedaq.opensciencegrid.org/assets/dunedaq- - asset-db.sqlite) - -n NAME, --name NAME asset name (default: None) - --subsystem {readout,trigger} - asset subsystem (default: None) - -l LABEL, --label LABEL - asset label (default: None) - -f {binary,text}, --format {binary,text} - asset file format (default: None) - --status {valid,expired,new_version_available} - asset file status (default: None) - -c CHECKSUM, --checksum CHECKSUM - MD5 checksum of asset file (default: None) - --description DESCRIPTION - description of asset file (default: None) - --replica-uri REPLICA_URI - replica URI (default: None) - -p, --print-metadata print full metadata (default: False) - --copy-to COPY_TO path to the directory where asset files will be copied to. (default: None) - -``` - ## How to add, update, and retire asset files -Note: these operations require write permissions to the database file, and file storage directories. Only Software Coordination team members need to perform these operations. +_Note: these operations require write permissions to the database file, and file storage directories. 
Only Software Coordination team members need to perform these operations._ ### `assets-add` @@ -73,118 +40,73 @@ Note: these operations require write permissions to the database file, and file The tool can take metadata fields from command line as well as from a JSON file. If both are presented, command-line entries take the precedence. Examples: -- `assets-add -s ./frames.bin --db-file ./dunedaq-asset-db.sqlite -n frames.bin -f binary --status valid --subsystem readout --label ProtoWIB --description "Used for FE emulation in FakeCardReader"` -``` -usage: assets-add [-h] [--db-file DB_FILE] [-n NAME] - [--subsystem {readout,trigger}] [-l LABEL] - [-f {binary,text}] - [--status {valid,expired,new_version_available}] - [--description DESCRIPTION] [--replica-uri REPLICA_URI] - [-s SOURCE] [--json-file JSON_FILE] - -optional arguments: - -h, --help show this help message and exit - --db-file DB_FILE path to database file (default: - /cvmfs/dunedaq.opensciencegrid.org/assets/dunedaq- - asset-db.sqlite) - -n NAME, --name NAME asset name (default: None) - --subsystem {readout,trigger} - asset subsystem (default: None) - -l LABEL, --label LABEL - asset label (default: None) - -f {binary,text}, --format {binary,text} - asset file format (default: None) - --status {valid,expired,new_version_available} - asset file status (default: None) - -c CHECKSUM, --checksum CHECKSUM - MD5 checksum of asset file (default: None) - --description DESCRIPTION - description of asset file (default: None) - --replica-uri REPLICA_URI - replica URI (default: None) - -s SOURCE, --source SOURCE - path to asset file (default: None) - --json-file JSON_FILE - json file containing file metadata (default: None) - -``` +- `assets-add -s ./frames1234.bin --db-file ./dunedaq-asset-db.sqlite -n frames1234.bin -f binary --status valid --subsystem readout --label WIBEth --description "Used for FE emulation in FakeCardReader"` ### `assets-update` Use `assets-update` to update certain metadata fields of a file. 
Similar as other tools, it takes the metadata fields from command-line for matching files in the database. Additionally, it takes a JSON string from command-line for the new metadata. Examples: -- `assets-update --subsystem readout --label ProtoWIB --json-string '{"description": "Used for FE emulation in FakeCardReader during Integration Week."}'` -- `assets-update -c dc74fe934cfb603d74ab6e54a0af7980 --json-string '{"status": "valid"}'` -``` -usage: assets-update [-h] [--db-file DB_FILE] [-n NAME] - [--subsystem {readout,trigger}] [-l LABEL] - [-f {binary,text}] - [--status {valid,expired,new_version_available}] - [--description DESCRIPTION] [--replica-uri REPLICA_URI] - [--json-string JSON_STRING] - -optional arguments: - -h, --help show this help message and exit - --db-file DB_FILE path to database file (default: - /cvmfs/dunedaq.opensciencegrid.org/assets/dunedaq- - asset-db.sqlite) - -n NAME, --name NAME asset name (default: None) - --subsystem {readout,trigger} - asset subsystem (default: None) - -l LABEL, --label LABEL - asset label (default: None) - -f {binary,text}, --format {binary,text} - asset file format (default: None) - --status {valid,expired,new_version_available} - asset file status (default: None) - -c CHECKSUM, --checksum CHECKSUM - MD5 checksum of asset file (default: None) - --description DESCRIPTION - description of asset file (default: None) - --replica-uri REPLICA_URI - replica URI (default: None) - --json-string JSON_STRING - json string to be updated in metadata (default: None) -``` +- `assets-update --subsystem readout --label WIBEth --json-string '{"description": "Used for FE emulation in FakeCardReader during Integration Week."}'` +- `assets-update -c dc74fe934cfb603d74ab6e54a0af7980 --json-string '{"status": "valid"}'` ### `assets-retire` `assets-retire` is the tool to retire a file. The operation is as simple as change its metadata field 'status' to 'expired'. It will not delete the file itself. 
Examples: + - `assets-retire -c dc74fe934cfb603d74ab6e54a0af7980` +### Publishing changes to cvmfs + +Publishing changes to cvmfs can be done via the following steps: + + + +1. Prepare changes in a local copy of the cvmfs repository's `assets` directory + + +2. On a cvmfs publisher node, open a cvmfs transaction, sync the `assets` directory in the repo to the local mirror with new changes, and publish the changes. + +The following code snippet shows a real-case example of adding a new file to the database, and "retire" a previous file. For space/logistical reasons it doesn't show that (1) the file also gets logged in the spreadsheet and (2) a DUNE DAQ environment has already been set up. + +#### Prepare changes in a local "assets" mirror + +```bash + +# Create a local mirror of "assets" + +rsync -vlprt /cvmfs/dunedaq.opensciencegrid.org/assets . + +# Make changes to the local assets mirror +# Specify the db file path with `--db-file` option so that the changes goes to the local mirror; + +## Adding a new file + +cd ./assets + +# Note that the name, label and description here are just given as examples +assets-add -s --db-file ./dunedaq-asset-db.sqlite -n wib_link_67.bin -f binary --status valid --subsystem readout --label WIBEth --description "Other WIBEth files have outdated detector_id fields in DAQEthHeader" + +## Retiring a file, referring to it by its hash + +assets-retire --db-file ./dunedaq-asset-db.sqlite -c a0ddae8343e82ba1a3668c5aea20f3d2 + +## More low-level: accomplishing the same as above, but via the assets-update command + +assets-update --db-file ./dunedaq-asset-db.sqlite -c a0ddae8343e82ba1a3668c5aea20f3d2 --json-string '{"status": "expired"}' + ``` -usage: assets-retire [-h] [--db-file DB_FILE] [-n NAME] - [--subsystem {readout,trigger}] [-l LABEL] - [-f {binary,text}] - [--status {valid,expired,new_version_available}] - [-c CHECKSUM] [--description DESCRIPTION] - [--replica-uri REPLICA_URI] - -optional arguments: - -h, --help show this help 
message and exit - --db-file DB_FILE path to database file (default: - /cvmfs/dunedaq.opensciencegrid.org/assets/dunedaq- - asset-db.sqlite) - -n NAME, --name NAME asset name (default: None) - --subsystem {readout,trigger} - asset subsystem (default: None) - -l LABEL, --label LABEL - asset label (default: None) - -f {binary,text}, --format {binary,text} - asset file format (default: None) - --status {valid,expired,new_version_available} - asset file status (default: None) - -c CHECKSUM, --checksum CHECKSUM - MD5 checksum of asset file (default: None) - --description DESCRIPTION - description of asset file (default: None) - --replica-uri REPLICA_URI - replica URI (default: None) + +#### Publish changes to cvmfs + +Technical details of how to publish to cvmfs [is covered in the daq-release documentation](https://dune-daq-sw.readthedocs.io/en/latest/packages/daq-release/publish_to_cvmfs/#the-basics). Here, after modifying your local mirror of `assets`, you'd sync it to /cvmfs/dunedaq.opensciencegrid.org/assets: + +```bash +rsync -vlprt : /cvmfs/dunedaq.opensciencegrid.org ``` @@ -194,9 +116,9 @@ optional arguments: _Last git commit to the markdown source of this page:_ -_Author: Pengfei Ding_ +_Author: Kurt Biery_ -_Date: Fri Feb 10 02:43:31 2023 -0600_ +_Date: Mon Oct 13 21:21:07 2025 -0500_ _If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/daq-assettools/issues](https://github.com/DUNE-DAQ/daq-assettools/issues)_ diff --git a/docs/packages/daq-buildtools/README.md b/docs/packages/daq-buildtools/README.md index 4cacc9490d7..8003f915bd1 100644 --- a/docs/packages/daq-buildtools/README.md +++ b/docs/packages/daq-buildtools/README.md @@ -1,24 +1,16 @@ - -_JCF: This document was last edited Feb-10-2023_ - # DUNE DAQ Buildtools +_This document was last edited Dec-13-2025_ + `daq-buildtools` is the toolset to simplify the development of DUNE DAQ packages. 
It provides environment and building utilities for the DAQ Suite. +If you've read these instructions before, release notes for specific +versions of daq-buildtools can be found at the bottom of this +document. + ## System requirements -To get set up, you'll need access to the cvmfs Spack area -`/cvmfs/dunedaq-development.opensciencegrid.org/spack-nightly` as is -the case, e.g., on the lxplus machines at CERN. If you've been doing -your own Spack work on the system in question, you may also want to -back up (rename) your existing `~/.spack` directory to give Spack a -clean slate to start from in these instructions. - -You'll also want `python` to be version 3; to find out whether this is the case, run `python --version`. If it isn't, then you can switch over to Python 3 with the following simple commands: -``` -source `realpath /cvmfs/dunedaq.opensciencegrid.org/spack-externals/spack-installation/share/spack/setup-env.sh` -spack load python@3.8.3%gcc@8.2.0 -``` +To get set up, you'll need access to the cvmfs areas `/cvmfs/dunedaq.opensciencegrid.org` and `/cvmfs/dunedaq-development.opensciencegrid.org`. This is the case, e.g., on the np04 cluster at CERN. ## Setup of `daq-buildtools` @@ -26,15 +18,17 @@ spack load python@3.8.3%gcc@8.2.0 Simply do: ``` source /cvmfs/dunedaq.opensciencegrid.org/setup_dunedaq.sh -setup_dbt dunedaq-v3.2.2 # dunedaq-v3.2.2 is the latest daq-buildtools version as of Feb-10-2023 +setup_dbt fddaq-v5.5.0 ``` +Note that `fddaq-v5.5.0` is aliased to `v8.9.11`. 
After running these two commands, then you'll see something like: ``` -Added /cvmfs/dunedaq.opensciencegrid.org/tools/dbt/v7.0.0/bin -> PATH -Added /cvmfs/dunedaq.opensciencegrid.org/tools/dbt/v7.0.0/scripts -> PATH +Added /cvmfs/dunedaq.opensciencegrid.org/tools/dbt/v8.9.11/bin -> PATH +Added /cvmfs/dunedaq.opensciencegrid.org/tools/dbt/v8.9.11/scripts -> PATH DBT setuptools loaded ``` + If you type `dbt-` followed by the `` key you'll see a listing of available commands, which include `dbt-create`, `dbt-build`, `dbt-setup-release` and `dbt-workarea-env`. These are all described in the following sections. Each time that you log into a fresh Linux shell and want to either (1) set up an existing cvmfs-based DUNE DAQ software release or (2) develop code within a pre-existing DUNE DAQ work area, you'll need to set up daq-buildtools. These two cases are described in detail momentarily. For (1) you'd want to repeat the method above to set up daq-buildtools. For (2) it's easier instead to `cd` into the work area and source the file named `env.sh`. @@ -42,15 +36,17 @@ Each time that you log into a fresh Linux shell and want to either (1) set up an ## Running a release from cvmfs -If you simply want access to a DUNE DAQ software release (its executables, etc.) without actually developing DUNE DAQ software itself, you'll want to run a release from cvmfs. After setting up daq-buildtools, you can simply run the following command if you wish to use a frozen release: +If you only want access to a DUNE DAQ software release (its executables, etc.) without actually developing DUNE DAQ software itself, you'll want to run a release from cvmfs. Please note that in general, stable releases (especially patch stable releases) are intended for this scenario, and _not_ for development. 
After setting up daq-buildtools, you can simply run the following command if you wish to use a stable release: ```sh -dbt-setup-release # dunedaq-v3.2.2 is the latest frozen release as of Feb-10-2023 +dbt-setup-release # fddaq-v5.5.0-a9 is the latest stable release as of Dec-13-2025 ``` -Instead of a frozen release you can also set up nightly releases, candidate releases or test releases using the same arguments as are described later for `dbt-create`; e.g. if you want to set up candidate release `rc-v3.2.1-2` you can do: +Note that if you set up a stable release you'll get a message along the lines of `Release "fddaq-v5.5.0-a9" requested; interpreting this as release "fddaq-v5.5.0-a9-1"`; this simply reflects that the latest build iteration of the stable release (`-1`, `-2`, etc.) has been aliased out for the convenience of the user. + +Instead of a stable release you can also set up nightly releases or candidate releases using the same arguments as are described later for `dbt-create`; e.g. if you want to set up candidate release `fddaq-v5.2.0-rc3-a9` you can do: ``` -dbt-setup-release -b candidate rc-v3.2.1-2 +dbt-setup-release -b candidate fddaq-v5.2.0-rc3-a9 ``` `dbt-setup-release` will set up both the external packages and DAQ packages, as well as activate the Python virtual environment. Note that the Python virtual environment activated here is read-only. @@ -62,34 +58,30 @@ If you wish to develop DUNE DAQ software, you can start by creating a work area. -Each work area is based on a DUNE DAQ software release, which defines what external and DUNE DAQ packages the code you develop in a work area are built against. Releases come in four categories: +Each work area is based on a DUNE DAQ software release, which defines what external and DUNE DAQ packages the code you develop in a work area are built against. Releases come in three categories: -* **Nightly Releases**: packages in nightly releases are built each night using the heads of their `develop` branches. Generally labeled as `N--
`, e.g. `N22-11-27`. +* **Nightly Releases**: packages in nightly releases are built each night using the heads of their `develop` and `production/v4` branches. Depending on whether it's the far detector stack or the near detector stack, and whether it's a develop or production build, these are generally labeled either as `NFD__
_` (far detector) or `NND__
_` (near detector). E.g. `NFD_DEV_240716_A9` is the AL9 nightly develop build for the far detector on July 16th, 2024, and `NFD_PROD4_250202_A9` is the v4 production nightly build on February 2, 2025. -* **Frozen Releases**: a frozen release typically comes out every couple of months, and only after extensive testing supervised by a Release Coordinator. Generally labeled as `dunedaq-vX.Y.X`, e.g. `dunedaq-v3.2.2` +* **Stable Releases**: a stable release typically comes out every couple of months, and only after extensive testing supervised by a Release Coordinator. Depending on whether it's the far detector stack or the near detector stack, this is labeled as `fddaq-vX.Y.Z-` or `nddaq-vX.Y.Z-`, e.g., `fddaq-v4.4.4-a9`. -* **Candidate Releases**: a type of release meant specifically for frozen release testing. Generally labeled as `rc-vX.Y.Z-`, e.g. `rc-v3.2.1-1` +* **Candidate Releases**: a type of release meant specifically for stable release testing. Generally labeled as `fddaq-vX.Y.Z-rc-` or `nddaq-vX.Y.Z-rc-`. For example, `fddaq-v4.4.0-rc4-a9` is the fourth release candidate for the AL9 build of `fddaq-v4.4.0`. The majority of work areas are set up to build against the most recent nightly release. To do so, run: ```sh -dbt-create [-i/--install-pyvenv] -n # E.g., N22-11-27 or last_successful +dbt-create -n # E.g., NFD_DEV_240213_A9 ``` -...where in general the most popular `` is `last_successful`, which as the name suggests will translate to the date of the most recent successful nightly release. The optional `-i` argument will be discussed in a moment. - -To see all available nightly releases, run `dbt-create -l -n` or `dbt-create -l -b nightly`. +You can also use `-n last_fddaq` to build against the most recent _develop_ branch, e.g., `NFD_DEV_241007_A9`. To see all available nightly releases, run `dbt-create -l -n` or `dbt-create -l -b nightly`.
Note also that you can leave out defining the name of the work area subdirectory, in which case it defaults to the same name as the release. If you want to build against a candidate release, run: ```sh -dbt-create [-i/--install-pyvenv] -b candidate # E.g., rc-v3.2.1-1 as of Nov-11-2022. +dbt-create -b candidate # E.g., fddaq-v4.4.0-rc4-a9 ``` ...where to see all available candidate releases, run `dbt-create -l -b candidate`. -To build against a test release, simply replace `candidate` above with `test`. And to build against a frozen release, you don't need the `-b ` argument at all. You can simply do: +And to build against a stable release (_not recommended_, as the codebase changes fairly rapidly), you don't need the `-b ` argument at all. You can just do: ``` -dbt-create [-i/--install-pyvenv] +dbt-create ``` -The option `-i/--install-pyvenv` for `dbt-create` is optional. By default, the Python virtual environment created in the work area will be a clone of an existing one from the release directory. This avoids the compilation/installation of Python modules using the `pyvenv_requirements.txt` in the release directory, and speeds up the work-area creation significantly. However, the first time running `dbt-create` with cloning on a node may take several minutes since cvmfs needs to fetch these files into local cache first, and `-i` is an option to avoid this. - -The structure of your work area will look like the following: +The structure of your work area will include the following files and directories: ```txt MyTopDir ├── build @@ -102,6 +94,31 @@ MyTopDir ``` The next section of this document concerns how to build code in your new work area. However, if you'd like to learn about how to retrieve information about your work area such as the release of the DUNE DAQ suite it builds against, you can skip ahead to [Finding Info on Your Work Area](#Finding_Info). 
+### Advanced `dbt-create` options + +Along with telling `dbt-create` what you want your work area to be named and what release you want it to be based off of, there are a few more options that give you finer-grained control over the work area. You can simply run `dbt-create -h` for a summary, but they're described in fuller detail here. + + +* `-s/--spack`: Install a local Spack instance in the work area. This will allow you to install and load whatever Spack packages you wish into your work area. + + +* `-q/--quick`: Use this if you don't plan to develop a Python package. This is much quicker than the default behavior of dbt-create, which will actually copy the Python virtual environment over to your work area, thereby giving you write permission to the project's Python packages. With `-q/--quick`, the Python virtual environment your work area uses is in the (read-only) release area on cvmfs. + + +* `-i/--install-pyvenv`: With this option, there will be compilation/installation of python modules using the `pyvenv_requirements.txt` in the release directory. This is typically slower than cloning, but not always. You can take further control by combining it with the `-p ` argument, though it's unlikely as a typical developer that you'd want a non-standard set of Python packages. + +### Cloning an entire work area + +A new (June 2025) pair of experimental scripts in daq-buildtools enables users to create a work area by cloning another work area, using a YAML recipe file as an intermediary. The basic approach is simple. To create a recipe file from an existing area, assuming its environment is set up, just do the following: +``` +dbtx-save-workarea-recipe.py +``` +and the script will generate a file called `.yaml`. This human-readable file will contain details about the original area, and can then be used later to generate a work area based on the same nightly/candidate/stable release as well as the same repos and their commits as the original area. 
To do so one can simply pass the file to `dbtx-create-workarea-from-recipe.py` as well as the desired name of the new work area: +``` +dbtx-create-workarea-from-recipe.py --workarea-name .yaml +``` +Both scripts have further options; pass `--help` as an argument to either one in order to get more details. + ## Cloning and building a package repo @@ -118,7 +135,6 @@ cd .. Note that in a "real world" situation [you'd be doing your development on a feature branch](https://dune-daq-sw.readthedocs.io/en/latest/packages/daq-release/development_workflow_gitflow/) in which case you'd add `-b ` to the `git clone` command above. - We're about to build and install the `listrev` package. (🔴 Note: if you are working with other packages, have a look at the [Working with more repos](#working-with-more-repos) subsection before running the following build command.) By default, the scripts will create a subdirectory of MyTopDir called `./install ` and install any packages you build off your repos there. If you wish to install them in another location, you'll want to set the environment variable `DBT_INSTALL_DIR` to the desired installation path before source-ing the `env.sh` script described below. You'll also want to remember to set the variable during subsequent logins to the work area if you don't go with the default. Now, do the following: @@ -131,7 +147,9 @@ dbt-build ### Working with more repos -To work with more repos, add them to the `./sourcecode` subdirectory as we did with listrev. Be aware, though: if you're developing a new repo which itself depends on another new repo, daq-buildtools may not already know about this dependency. "New" in this context means "not listed in `/cvmfs/dunedaq.opensciencegrid.org/spack/releases/dunedaq-v3.2.2/dbt-build-order.cmake`". If this is the case, add the names of your new package(s) to the `build_order` list found in `./sourcecode/dbt-build-order.cmake`, placing them in the list in the relative order in which you want them to be built. 
+To work with more repos, add them to the `./sourcecode` subdirectory as we did with listrev. Be aware, though: if you're developing a new repo which itself depends on another new repo, daq-buildtools may not already know about this dependency. If this is the case, add the names of your new package(s) to the `build_order` list found in `./sourcecode/dbt-build-order.cmake`, placing them in the list in the relative order in which you want them to be built. + +Note that as of daq-buildtools `v8.7.1`, you can replace the actual `./sourcecode` directory in your work area with a soft link called `sourcecode` which points to an actual `./sourcecode` directory elsewhere on your file system. As a reminder, once you've added your repos and built them, you'll want to run `dbt-workarea-env` so the environment picks up their applications, libraries, etc. @@ -149,11 +167,13 @@ dbt-build --clean --unittest # Blow away the contents of ./build, run config+ge ``` ..where in the above case, you blow away the contents of `./build`, run config+generate+build, install the result in `$DBT_INSTALL_DIR` and then run the unit tests. Be aware that for many packages, unit tests will only (fully) work if you've also rerun `dbt-workarea-env`. +To run any integration tests your repos may contain (e.g., `dfmodules`) , you can pass the `--integtest` option to `dbt-build`. + To check for deviations from the coding rules described in the [DUNE C++ Style Guide](https://dune-daq-sw.readthedocs.io/en/latest/packages/styleguide/), run with the `--lint` option: ``` dbt-build --lint ``` -...though be aware that some guideline violations (e.g., having a function which tries to do unrelated things) can't be picked up by the automated linter. (_n.b.: As of Nov-11-2022, the `llvm` package needed for linting has been removed from the environment. 
It's possible by the time you read this that the issue has been fixed_) Also note that you can use `dbt-clang-format.sh` in order to automatically fix whitespace issues in your code; type it at the command line without arguments to learn how to use it. +...though be aware that some guideline violations (e.g., having a function which tries to do unrelated things) can't be picked up by the automated linter. Also note that you can use `dbt-clang-format.sh` in order to automatically fix whitespace issues in your code; type it at the command line without arguments to learn how to use it. Note that unlike the other options to `dbt-build`, `--lint` and `--unittest` are both capable of taking an optional argument, which is the name of a specific repo in your work area which you'd like to either lint or run unit tests for. This can be useful if you're focusing on developing one of several repos in your work area; e.g. `dbt-build --lint `. With `--lint` you can get even more fine grained by passing it the name of a single file in your repository area; either the absolute path for the file or its path relative to the directory you ran `dbt-build` from will work. @@ -167,6 +187,21 @@ If you want to change cmake message log level, you can use the `--cmake-msg-lvl` dbt-build --cmake-msg-lvl= ``` +By default the build is performed using gcc's `O2` compilation flag. If you wish to use a different optimization level, you can run: +``` +dbt-build --optimize-flag O3 # Or Og, etc. +``` +If you wish to only generate files but _not_ also perform a compilation (this is a kind of expert action, but there are use cases for it) you can run: +``` +dbt-build --codegen-only +``` + +If you want to troubleshoot your code by taking advantage of `gcc`'s `-fsanitize` option, you can forward an argument to it via `dbt-build`'s `--sanitize` option. Note that in order to keep things consistent a clean build is required for this.
One example: +``` +dbt-build --clean --sanitize address # Will ensure -fsanitize=address is passed to gcc +``` +Depending on the argument provided, there may be some helpful tips at the bottom of the `dbt-build` output on how to run the code you've built with sanitization applied. + You can see all the options listed if you run the script with the `--help` command, i.e. ``` dbt-build --help @@ -177,7 +212,7 @@ Finally, note that both the output of your builds and your unit tests are logged ## Running -In order to access the applications, libraries and plugins built and installed into the `$DBT_INSTALL_DIR` area during the above procedure, the system needs to be instructed on where to look for them. This is accomplished via tha `dbt-workarea-env` command you've already seen. E.g., log into a new shell, cd into your work area, then do the following: +In order to access the applications, libraries and plugins built and installed into the `$DBT_INSTALL_DIR` area during the above procedure, the system needs to be instructed on where to look for them. This is accomplished via the `env.sh` file you've already seen. E.g., log into a new shell, cd into your work area, then do the following: ``` export DBT_INSTALL_DIR= # ONLY needed if you didn't use the default . ./env.sh @@ -187,11 +222,7 @@ Note that if you add a new repo to your work area, after building your new code Once the runtime environment is set, just run the application you need. listrev, however, has no applications; it's just a set of DAQ module plugins which get added to CET_PLUGIN_PATH. -Now that you know how to set up a work area, a nice place to learn a bit about the DUNE DAQ suite is via the `daqconf` package.
Take a look at its documentation [here](https://dune-daq-sw.readthedocs.io/en/latest/packages/daqconf/); note that in parts of the `daqconf` instructions you're told to run daq-buildtools commands which you may already have run (e.g., to create a new work area) in which case you can skip those specific commands. - -A classic option for learning about how to run DAQ modules in a work area is [the listrev documentation](https://dune-daq-sw.readthedocs.io/en/latest/packages/listrev/). - -In both the links above you'll notice you'll be running a program called `nanorc` to run the DAQ. To learn more about `nanorc` itself, take a look at [the nanorc documentation](https://dune-daq-sw.readthedocs.io/en/latest/packages/nanorc/). +Now that you know how to set up a work area, a classic option for learning about how to run DAQ modules in a work area is [the listrev documentation](https://dune-daq-sw.readthedocs.io/en/latest/packages/listrev/). @@ -199,27 +230,93 @@ In both the links above you'll notice you'll be running a program called `nanorc A couple of things need to be kept in mind when you're building code in a work area. The first is that when you call `dbt-build`, it will build your repos against a specific release of the DUNE DAQ software stack - namely, the release you (or someone else) provided to `dbt-create` when the work area was first created. Another is that the layout and behavior of a work area is a function of the version of daq-buildtools which was used to create it. As a work area ages it becomes increasingly likely that a problem will occur when you try to build a repo in it; this is natural and unavoidable. -As such, it's important to know the assumptions a work area makes when you use it to build code. In the base of your work area is a file called `dbt-workarea-constants.sh`, which will look something like the following: +As such, it's important to know the assumptions a work area makes when you use it to build code. 
This section covers ways to learn details about your work area and its contents. + +### `dbt-info` + +A useful script to call to get immediate information on your development environment is `dbt-info`. For a full set of options you can simply run `dbt-info --help`, but for a quick summary, we have the following: + + +* `dbt-info release`: tells you if it's a far detector or near detector release, what its name is (e.g. `NFD_DEV_240213_A9`), what the name of the base release is, and where the release is located in cvmfs. + + +* `dbt-info package `: tells you info about the DUNE DAQ package whose name you provide it (git commit hash of its code, etc.). Passing "all" as the package name gives you info for all the DUNE DAQ packages. + + +* `dbt-info external `: `external` is same as the `package` option, except you use it when you want info not on a DUNE DAQ package but an external package (e.g., `boost`) + + +* `dbt-info pymodule `: get the version of a Python module. Response will differ depending on whether you have a local Python environment in your work area. + + +* `dbt-info sourcecode`: will tell you the branch each of the repos in your work area is on, as well as whether the code on the branch has been edited (indicated by an `*`) + + +* `dbt-info release_size`: tells you the # of packages and memory (in KB) used by each of the release, the base release, and the externals. 
+ +### `dbt-workarea-constants.sh` + +In the base of your work area is a file called `dbt-workarea-constants.sh`, which will look something like the following: ``` -export SPACK_RELEASE="N22-09-23" -export SPACK_RELEASES_DIR="/cvmfs/dunedaq-development.opensciencegrid.org/nightly" -export DBT_ROOT_WHEN_CREATED="/cvmfs/dunedaq.opensciencegrid.org/tools/dbt/v6.0.2" +export SPACK_RELEASE="fddaq-v4.1.0" +export SPACK_RELEASES_DIR="/cvmfs/dunedaq.opensciencegrid.org/spack/releases" +export DBT_ROOT_WHEN_CREATED="/cvmfs/dunedaq.opensciencegrid.org/tools/dbt/v7.2.1" +export LOCAL_SPACK_DIR="/home/jcfree/daqbuild_fddaq-v4.1.0/.spack" ``` This file is sourced whenever you run `dbt-workarea-env`, and it tells both the build system and the developer where they can find crucial information about the work areas' builds. Specifically, these environment variables mean the following: -* `$SPACK_RELEASE`: this is the release of the DUNE DAQ software stack against which repos will build (e.g. `dunedaq-v2.10.2`, `N22-04-09`, etc.) +* `$SPACK_RELEASE`: this is the release of the DUNE DAQ software stack against which repos will build (e.g. `fddaq-v4.4.0-rc4-a9`, `NFD_DEV_240213_A9`, etc.) -* `$SPACK_RELEASES_DIR`: The base of the directory containing the DUNE DAQ software installations. The directory `$SPACK_RELEASES_DIR/$SPACK_RELEASE` contains the installation of the packages for your release +* `$SPACK_RELEASES_DIR`: The base of the directory containing the DUNE DAQ software installations. * `DBT_ROOT_WHEN_CREATED`: The directory containing the `env.sh` file which was sourced before this work area was first created -There are also useful Spack commands which can be executed to learn about the versions of the individual packages you're working with, once you've run `dbt-workarea-env` or `dbt-setup-release`. 
An [excellent Spack tutorial](https://spack-tutorial.readthedocs.io/en/latest/tutorial_basics.html) inside the official Spack documentation is worth a look, but a few Spack commands can be used right away to learn about a work area: +* `LOCAL_SPACK_DIR`: If the `-s/--spack` option was passed to `dbt-create` when the work area was built, this points to where the local Spack area is located + +If you set up your work area using `daq-buildtools v8.6.1` or later (i.e., using the `develop` line instead of `production/v4`), you'll also see something like +``` +export DUNE_DAQ_RELEASE_SOURCE="/cvmfs/dunedaq-development.opensciencegrid.org/candidates/fddaq-v5.1.0-rc1-a9/sourcecode" +``` +`DUNE_DAQ_RELEASE_SOURCE` points to a cvmfs area containing the source code used to build this release. This can be useful for inspecting packages not checked out locally under `$DBT_AREA_ROOT/sourcecode`. + +### `dbt-lcov.sh` + +Strictly speaking, this script is more about finding info about your code than about your work area. It determines what fraction of your lines of code and functions the unit tests in your work area's repos cover. This script wraps calls to our installed external [`lcov` package](https://github.com/linux-test-project/lcov). Assuming you've set up your work area's environment and are in its base, if you run +``` +dbt-lcov.sh +``` +what will happen is that, if it hasn't already been run, the script will insert a few lines of CMake code into the `sourcecode/CMakeLists.txt` file which will ensure that when the repos are built the output will be instrumented in a manner `lcov` can use. It will then perform a clean build now that `sourcecode/CMakeLists.txt` has been modified, followed by a run of the unit tests. It will then output the results in a subdirectory called `./code_coverage_results`; in particular, `./code_coverage_results/html/index.html` is a webpage which will display the fractions mentioned above.
+ +Please note that due to the modification of `sourcecode/CMakeLists.txt`, you wouldn't want to use the code you build for normal running (e.g., for performance testing or data readout). Likely it's best to use a work area dedicated to code coverage study as opposed to other functions. + +### Useful Spack commands + +There are also useful Spack commands which can be executed to learn about the versions of the individual packages you're working with, once you've run `dbt-workarea-env` or `dbt-setup-release`. An [excellent Spack tutorial](https://spack-tutorial.readthedocs.io/en/latest/tutorial_basics.html) inside the official Spack documentation is worth a look, but a few Spack commands can be used right away to learn more about your environment. They're presented both for the case of you having set up a nightly release and a stable release: + +* `spack find -N -d --loaded | grep NB` will tell you all the DUNE DAQ packages shared by both far- and near detector software which have been loaded by `dbt-workarea-env` or `dbt-setup-release` + +* `spack find -N -d --loaded | grep NFD` for far detector-specific DUNE DAQ packages + +* `spack find -N -d --loaded | grep NND` for near detector-specific DUNE DAQ packages + +* `spack find -N -d --loaded | grep dunedaq-externals` for external packages not developed by DUNE collaborators + +* `spack find -p ` will tell you the path to the actual contents of a Spack-installed package + +Finally, when `dbt-build` is run, a file called `daq_app_rte.sh` is +produced and placed in your installation area (`$DBT_INSTALL_DIR`). You generally don't need to think about `daq_app_rte.sh` unless you're curious; it's a sourceable file which contains environment variables that [drunc](https://dune-daq-sw.readthedocs.io/en/latest/packages/drunc/) uses to launch processes when performing runs. 
+ +## Release Notes + +[`v8.9.4` release notes](https://github.com/DUNE-DAQ/daq-buildtools/releases/tag/v8.9.4) + +[`v8.9.2` release notes](https://github.com/DUNE-DAQ/daq-buildtools/releases/tag/v8.9.2) -* `spack find --loaded -N | grep $SPACK_RELEASE` will tell you all the DUNE DAQ packages which have been loaded by `dbt-workarea-env` or `dbt-setup-release` +[`v8.9.1` release notes](https://github.com/DUNE-DAQ/daq-buildtools/releases/tag/v8.9.1) -* `spack find --loaded -N | grep dunedaq-externals` is the same, but will tell you all the external packages +[`v8.9.0` release notes](https://github.com/DUNE-DAQ/daq-buildtools/releases/tag/v8.9.0) -* `spack find --loaded -p ` will tell you the path to the actual contents of a Spack-installed package +[`v8.8.0` release notes](https://github.com/DUNE-DAQ/daq-buildtools/releases/tag/v8.8.0) ## Next Step @@ -237,7 +334,7 @@ _Last git commit to the markdown source of this page:_ _Author: John Freeman_ -_Date: Fri Feb 10 09:43:50 2023 -0600_ +_Date: Sat Dec 13 11:21:28 2025 -0600_ _If you see a problem with the documentation on this page, please file an Issue at [https://github.com/DUNE-DAQ/daq-buildtools/issues](https://github.com/DUNE-DAQ/daq-buildtools/issues)_ diff --git a/docs/packages/daq-cmake/README.md b/docs/packages/daq-cmake/README.md index 2a1e5f6acad..ed672cc5251 100644 --- a/docs/packages/daq-cmake/README.md +++ b/docs/packages/daq-cmake/README.md @@ -1,20 +1,22 @@ # daq-cmake -_JCF, Sep-28-2022: the following daq-cmake documentation assumes you're using (a candidate) dunedaq-v3.2.0 or a recent nightly as it covers the new `create_dunedaq_package` script. For daq-cmake documentation prior to this addition please go [here](https://dune-daq-sw.readthedocs.io/en/v3.1.1/packages/daq-cmake/)_ - This package provides CMake support for DUNE-DAQ packages. 
The documentation for this package is divided into four parts: + 1) Instructions for `create_dunedaq_package`, a script which will generate a good deal of CMake/C++ code which is standard across all DUNE DAQ packages + 2) A description of the standard structure and CMake build code in a DUNE DAQ package + 3) A complete reference manual for the DUNE-DAQ-specific CMake functions developers can call in order to specify their package's build + 4) A description of how we use schema in order to consistently define data structures -Note that this documentation assumes you have some familiarity with the [daq-buildtools package](https://dune-daq-sw.readthedocs.io/en/latest/packages/daq-buildtools) and know how to set up a development area and run commands to build code in it. +Note that this documentation assumes you have some familiarity with the [daq-buildtools package](https://dune-daq-sw.readthedocs.io/en/latest/packages/daq-buildtools) and know how to set up a development area and run commands to build code in it. ## The `create_dunedaq_package` script -A DUNE DAQ software package is composed of various types of software components - standalone applications, libraries, [DAQModules](https://dune-daq-sw.readthedocs.io/en/latest/packages/appfwk/), etc. Across the packages there are common ways these are implemented, whether as a result of our [official coding guidelines](https://dune-daq-sw.readthedocs.io/en/latest/packages/styleguide/) or simply through tradition. `create_dunedaq_package` takes advantage of these patterns and saves you work by generating much of the boilerplate code which makes up a DUNE DAQ package. +A DUNE DAQ software package is composed of various types of software components - standalone applications, libraries, [DAQModules](https://dune-daq-sw.readthedocs.io/en/latest/packages/appfwk/), etc. 
Across the packages there are common ways these are implemented, whether as a result of our [official coding guidelines](https://dune-daq-sw.readthedocs.io/en/latest/packages/styleguide/) or simply through tradition. `create_dunedaq_package` takes advantage of these patterns and saves you work by generating much of the boilerplate code which makes up a DUNE DAQ package. Before using `create_dunedaq_package`, you'll want to have some idea of what software components will make up your package, and what their names should be. While the only argument actually required by `create_dunedaq_package` is the name of your new package, it won't do much unless you provide it with options and arguments. You can see what these are by running `create_dunedaq_package -h`, reprinted here for your convenience. @@ -30,14 +32,14 @@ Arguments and options: `--test-app`: same as `--daq-module`, but for integration test applications -`--config-generation`: whether to generate a script which itself will generate JSON code to create an application based on the package. Requires at least one `--daq-module` as well. +`--pytest`: will create a Python program readable by the [pytest integration test framework](https://docs.pytest.org/en/stable/). It takes the name of the test as an argument; note the name needs to be of the form `*_test` or `test_*` so that pytest can work with it. -Note that some of these concepts, e.g. a user-oriented app vs. an app designed for integration tests of the package itself, are covered below in the [Overview of a DUNE DAQ package](#package_overview) section. +Note that some of these concepts, e.g. a user-oriented app vs. an app designed for integration tests of the package itself, are covered below in the [Overview of a DUNE DAQ package](#package_overview) section. In the directory `create_dunedaq_package` is run out of, `create_dunedaq_package` will create a subdirectory named after your package if such a subdirectory doesn't exist. 
If a subdirectory with that name already _does_ exist, it should be empty with the possible exceptions of a `README.md` documentation file and/or a `.git/` version control directory. These exceptions allow you to run the script using as an argument the name of a new repo which you've cloned into your area. An example of using `create_dunedaq_package` would be the following (note you can horizontal-scroll the command below): ``` cd ./sourcecode # If we were in the base of a development area -create_dunedaq_package --daq-module AFirstModule --config-generation --user-app an_app_for_users --user-app another_app_for_users --python-bindings --main-library thenewpackage +create_dunedaq_package --daq-module AFirstModule --user-app an_app_for_users --user-app another_app_for_users --python-bindings --main-library thenewpackage ``` (Of course in real life please use better names for your package and its components than those in the example). If you were to `ls thenewpackage`, you would see that the script had set up several new directories for you, as well as a `CMakeLists.txt` file: ``` @@ -53,37 +55,27 @@ schema src unittest ``` -where most of the directories contain boilerplate code for the software components you requested. While you'd be able to build this boilerplate package if it were in the `sourcecode/` directory of a standard DUNE DAQ development environment, the new package's components do almost nothing, although in the case of DAQModules code is generated which provide an example of how to set a member variable via Run Control configuration. Nonetheless this boilerplate code will need to be replaced, filled in and extended by the package's developers. 
Also if you look at `CMakeLists.txt`, you'll see that many of the function calls you'd need will have been added, though generally missing the arguments you'd need to provide them so they would know what libraries to link against, e.g.: +where most of the directories contain boilerplate code for the software components you requested. While you'd be able to build this boilerplate package if it were in the `sourcecode/` directory of a standard DUNE DAQ development environment, the new package's components do almost nothing. Nonetheless this boilerplate code will need to be replaced, filled in and extended by the package's developers. Also if you look at `CMakeLists.txt`, you'll see that many of the function calls you'd need will have been added, though generally missing the arguments you'd need to provide them so they would know what libraries to link against, e.g.: ``` daq_add_application(an_app_for_users an_app_for_users.cxx LINK_LIBRARIES ) # Any libraries to link in not yet determined ``` -Obviously comments such as `# Any libraries to link in not yet determined` should be deleted when it becomes appropriate. - -Note also that a unit test is automatically generated for you _which is designed to fail_. Developers are strongly encouraged to replace it with appropriate unit tests for their package, unless it's one of those rare packages which don't need unit tests, in which case the unit test functionality should be entirely stripped from the package. +Obviously comments such as `# Any libraries to link in not yet determined` should be deleted when it becomes appropriate. -If the `--config-generation` option is chosen, the script which gets produced is called `_gen`. You can pass it the `-h` option to see its arguments, but the main thing to know is that to pass it a set of arguments you'd want to do so via the `-c ` argument. 
An example of such a JSON file can be found in `/scripts/_example_config.json` file which is produced after you've run `create_dunedaq_package` with the `--config-generation` option. - -Assuming you're in the base of a development area [whose environment has been set up](https://dune-daq-sw.readthedocs.io/en/latest/packages/daq-buildtools) and have run the example `create_dunedaq_package` command above, you can now build your newly generated code and then try out the configuration generation script: -``` -dbt-build -dbt-workarea-env -thenewpackage_gen -c ./sourcecode/thenewpackage/scripts/thenewpackage_example_config.json anewconfig -``` -...where you can edit the values `num_afirstmodules` and `some_configured_value` in (a copy of) `thenewpackage_example_config.json` to generate a different configuration. Note that while this _legally_ runs in [`nanorc`](https://dune-daq-sw.readthedocs.io/en/latest/packages/nanorc/), it doesn't actually do anything -- in particular, the DAQ module(s) you've specified only set a member variable when configured, and don't communicate with anything. +Note also that a unit test is automatically generated for you _which is designed to fail_. Developers are strongly encouraged to replace it with appropriate unit tests for their package, unless it's one of those rare packages which don't need unit tests, in which case the unit test functionality should be entirely stripped from the package. -Now that you know how to generate the boilerplate for a DUNE DAQ package, please read on for a more in-depth understanding of what a typical DUNE DAQ package looks like. +Now that you know how to generate the boilerplate for a DUNE DAQ package, please read on for a more in-depth understanding of what a typical DUNE DAQ package looks like. 
## Overview of a DUNE DAQ package ### Setting up a development area -To create a new package, you'll want to install a DUNE-DAQ development environment and then create a new CMake project for the package as described in [in the daq-buildtools documentation](https://dune-daq-sw.readthedocs.io/en/latest/packages/daq-buildtools/). +To create a new package, you'll want to install a DUNE-DAQ development environment and then create a new CMake project for the package as described [in the daq-buildtools documentation](https://dune-daq-sw.readthedocs.io/en/latest/packages/daq-buildtools/). ### A package's subdirectory structure -To learn a bit more about how to structure your package so that it can be incorporated into the DUNE DAQ software suite, we'll play with a contrived package called "toylibrary". It's actually contained within a subdirectory of the daq-cmake repo; however, in order to be able to build toylibrary we'll want to copy it into the `./sourcecode` directory so the build system can work with it. Assuming you're already in the base directory of your development environment, do the following: +To learn a bit more about how to structure your package so that it can be incorporated into the DUNE DAQ software suite, we'll play with a contrived package called "toylibrary". It's actually contained within a subdirectory of the daq-cmake repo; however, in order to be able to build toylibrary we'll want to copy it into the `./sourcecode` directory so the build system can work with it. Assuming you're already in the base directory of your development environment, do the following: ``` git clone https://github.com/DUNE-DAQ/daq-cmake cd daq-cmake @@ -92,7 +84,7 @@ cd .. mv daq-cmake/toylibrary sourcecode rm -rf daq-cmake ``` -You can now build toylibrary like you would a standard DUNE DAQ package.
Please note that if you do so, since toylibrary isn't an official DUNE DAQ package and isn't a git repository, you'll get a couple of warnings; unlike most warnings, you can disregard these. Specifically, these warnings are `Package "toylibrary" not provided to the daq_add_subpackages` and `warning: Not a git repository.` (and surrounding text). +You can now build toylibrary like you would a standard DUNE DAQ package. Please note that if you do so, since toylibrary isn't an official DUNE DAQ package and isn't a git repository, you'll get a couple of warnings; unlike most warnings, you can disregard these. Specifically, these warnings are `Package "toylibrary" not provided to the daq_add_subpackages` and `warning: Not a git repository.` (and surrounding text). In terms of its actual functionality, toylibrary is pretty useless (it contains a class which can wrap an integer, and another class which can print that wrapped integer). However, its functionality is beside the point; toylibrary contains many features which DUNE DAQ packages have in common, in particular DUNE DAQ packages which provide a library other developers want to link against. For starters, take a look at the subdirectories, `ls sourcecode/toylibrary`: @@ -109,18 +101,18 @@ In terms of its actual functionality, toylibrary is pretty useless (it contains * *scripts* This directory is inteneded to hold executable scripts. In this case it contains the script, `toyscript.py`. The `python` script demonstrates how the C++ code exposed in `toy_wrapper.cpp` can be used from within `python`. -If your package contains applications intended not for testing but for the end user, you'd put the code for it in a subdirectory called `apps/`. toylibrary doesn't have this type of application, but, e.g., the appfwk package does. Similarly, plugins not intended for testing but for the end user would go in `plugins/`. 
+If your package contains applications intended not for testing but for the end user, you'd put the code for it in a subdirectory called `apps/`. toylibrary doesn't have this type of application, but, e.g., the appfwk package does. Similarly, plugins not intended for testing but for the end user would go in `plugins/`. ### Coding rules -Along with having a standard directory structure, the C++ code itself in toylibrary conforms to the [DUNE C++ Style Guide](https://dune-daq-sw.readthedocs.io/en/latest/packages/styleguide/). Here, "style" doesn't mean whitespace and formatting, but rather, a set of Modern C++ best practices designed to make your code more robust against bugs, easier to extend, easier to reuse, etc. The DUNE C++ Style Guide is derived from the Google C++ Style Guide, but is greatly simplified and has been modified to be more appropriate to the DUNE DAQ project than Google's projects. Code which is merged into a package's git develop branch should be in conformance with the guide; while it's encouraged for code on a package's unmerged feature branches to also be in conformance, this is less important. +Along with having a standard directory structure, the C++ code itself in toylibrary conforms to the [DUNE C++ Style Guide](https://dune-daq-sw.readthedocs.io/en/latest/packages/styleguide/). Here, "style" doesn't mean whitespace and formatting, but rather, a set of Modern C++ best practices designed to make your code more robust against bugs, easier to extend, easier to reuse, etc. The DUNE C++ Style Guide is derived from the Google C++ Style Guide, but is greatly simplified and has been modified to be more appropriate to the DUNE DAQ project than Google's projects. Code which is merged into a package's git develop branch should be in conformance with the guide; while it's encouraged for code on a package's unmerged feature branches to also be in conformance, this is less important. 
### Your project's CMakeLists.txt file -Every DUNE DAQ package should have one and only one `CMakeLists.txt` file, in the base directory of the package's repo (not to be confused with the base directory of the overall development area). To learn a bit about what that `CMakeLists.txt` file should look like, let's take a look at `sourcecode/toylibrary/CMakeLists.txt`. Because CMake is widely used and extensively documented online, this documentation will primarily focus on DUNE-specific CMake functions. The full documentation of the DUNE-specific CMake functions for users can be found [below](#cmake_function_descriptions). Depending on your learning style, however, you may find it easier to start learning about some of what these functions are capable of by reading on without skipping. +Every DUNE DAQ package should have one and only one `CMakeLists.txt` file, in the base directory of the package's repo (not to be confused with the base directory of the overall development area). To learn a bit about what that `CMakeLists.txt` file should look like, let's take a look at `sourcecode/toylibrary/CMakeLists.txt`. Because CMake is widely used and extensively documented online, this documentation will primarily focus on DUNE-specific CMake functions. The full documentation of the DUNE-specific CMake functions for users can be found [below](#cmake_function_descriptions). Depending on your learning style, however, you may find it easier to start learning about some of what these functions are capable of by reading on without skipping. -At the top of `CMakeLists.txt`: before doing anything else, we want to define the minimum version of CMake used (currently 3.12, which supports [modern CMake style](https://cliutils.gitlab.io/modern-cmake/)) as well as the name and version of the project. 
Concerning the version: it may not literally be the case that the code you're working with is exactly the same as the version-in-question's release code, because you may be on a feature branch, or there may have been commits to the develop branch since the last release. +At the top of `CMakeLists.txt`: before doing anything else, we want to define the minimum version of CMake used (currently 3.12, which supports [modern CMake style](https://cliutils.gitlab.io/modern-cmake/)) as well as the name and version of the project. Concerning the version: it may not literally be the case that the code you're working with is exactly the same as the version-in-question's release code, because you may be on a feature branch, or there may have been commits to the develop branch since the last release. ``` cmake_minimum_required(VERSION 3.12) project(toylibrary VERSION 1.1.0) @@ -129,7 +121,7 @@ Next, we want to make CMake functions written specifically for DUNE DAQ developm ``` find_package(daq-cmake REQUIRED) ``` -This is how we ensure that the `CMakeLists.txt` file has access to the standard DUNE DAQ CMake functions previously mentioned. When `find_package` is called here it imports daq-cmake's `DAQ` CMake module. Note that by convention all functions/macros within the module begin with `daq_`, so as to distinguish them from functions/macros from CMake modules written outside of DUNE DAQ. +This is how we ensure that the `CMakeLists.txt` file has access to the standard DUNE DAQ CMake functions previously mentioned. When `find_package` is called here it imports daq-cmake's `DAQ` CMake module. Note that by convention all functions/macros within the module begin with `daq_`, so as to distinguish them from functions/macros from CMake modules written outside of DUNE DAQ. 
The next step is to call a macro from the `DAQ` module which sets up a standard DUNE CMake environment for your `CMakeLists.txt` file: ``` @@ -141,41 +133,41 @@ Among other things daq_setup_environment() will do the following: * Ensure all code within the project can find the project's public headers -* Allow our linter scripts to work with the code +* Allow our linter scripts to work with the code * Have gcc use standard warnings * Support the use of CTest for the unit tests -Next you'll see calls to CMake's [find_package](https://cmake.org/cmake/help/v3.17/command/find_package.html) function, which makes toylibrary's dependencies available. Comments in the file explain why the dependencies are selected. +Next you'll see calls to CMake's [find_package](https://cmake.org/cmake/help/v3.17/command/find_package.html) function, which makes toylibrary's dependencies available. Comments in the file explain why the dependencies are selected. Please note that when developing your own package, if it's part of the nightly build and you add a new dependency, besides adding the needed `find_package` call you should also alert Software Coordination so they can add the dependency to the Spack build of the package. See below in the section called "Installing your project as a local package" for more about dependencies. -Then, you'll see a call to a function called `daq_add_library`. +Then, you'll see a call to a function called `daq_add_library`. ``` daq_add_library(IntPrinter.cpp LINK_LIBRARIES logging::logging) ``` -What `daq_add_library` does here is create the main project library. It looks in the project's `./src` subdirectory for a file called `IntPrinter.cpp`, which it then compiles and links against the DUNE DAQ logging library. The result is output in the installation area (`$DBT_INSTALL_DIR`) as a shared object library named after the project itself, `toylibrary/lib64/libtoylibrary.so`. +What `daq_add_library` does here is create the main project library. 
It looks in the project's `./src` subdirectory for a file called `IntPrinter.cpp`, which it then compiles and links against the DUNE DAQ logging library. The result is output in the installation area (`$DBT_INSTALL_DIR`) as a shared object library named after the project itself, `toylibrary/lib64/libtoylibrary.so`. The next function you see called in the CMakeLists.txt file is `daq_add_python_bindings`: ``` -daq_add_python_bindings( toy_wrapper.cpp LINK_LIBRARIES ${PROJECT_NAME} ) +daq_add_python_bindings( toy_wrapper.cpp ) ``` -which is a function designed to allow the binding of C++ code to python. To do so, it relies on the header only library, `pybind11`. The function expects to find the source files exposing the C++ code, in the package directory, `pybindsrc`. In this `toylibrary` case, we have specified that the bindings are located in the file `toy_wrapper.cpp`. The resulting compiled file will be called, `_daq_${PROJECT_NAME}_py.so`, and will be placed in the output installation subdirectory, `${PROJECT_NAME}/lib64/python/${PROJECT_NAME}`. Similarly to `daq_add_library`, `_daq_${PROJECT_NAME}_py.so` will be linked against the libraries specified after `LINK_LIBRARIES`. For how to import the exposed C++ in, see detailed description section. After the call of `daq_add_python_bindings`, you will see the call to the function `daq_add_application`. +which is a function designed to allow the binding of C++ code to python. To do so, it relies on the header only library, `pybind11`. The function expects to find the source files exposing the C++ code, in the package directory, `pybindsrc`. In this `toylibrary` case, we have specified that the bindings are located in the file `toy_wrapper.cpp`. The resulting compiled file will be called, `_daq_${PROJECT_NAME}_py.so`, and will be placed in the output installation subdirectory, `${PROJECT_NAME}/lib64/python/${PROJECT_NAME}`. 
Similarly to `daq_add_library`, `_daq_${PROJECT_NAME}_py.so` will be linked against the libraries specified after `LINK_LIBRARIES`; however, it will also automatically link against the main package library. For how to import the exposed C++ code, see the detailed description section. After the call of `daq_add_python_bindings`, you will see the call to the function `daq_add_application`. ``` daq_add_application( toylibrary_test_program toylibrary_test_program.cxx TEST LINK_LIBRARIES ${Boost_PROGRAM_OPTIONS_LIBRARY} ${PROJECT_NAME} ) ``` -which searches in the projects' `test/apps/` subdirectory for a file called `toylibrary_test_program.cxx`, builds it, and links against the project's main library which we created via the previous `daq_add_library` command as well as a Boost library used to parse program input. The output application is named after the first argument to the function, `toylibrary_test_program`; it can be found in `$DBT_INSTALL_DIR/toylibrary/test/bin/toylibrary_test_program`. Note that if the "TEST" argument hadn't been supplied, the build system would have looked in a subdirectory of the project called `apps/` rather than `test/apps/` for the source file. +which searches in the project's `test/apps/` subdirectory for a file called `toylibrary_test_program.cxx`, builds it, and links against the project's main library which we created via the previous `daq_add_library` command as well as a Boost library used to parse program input. The output application is named after the first argument to the function, `toylibrary_test_program`; it can be found in `$DBT_INSTALL_DIR/toylibrary/test/bin/toylibrary_test_program`. Note that if the "TEST" argument hadn't been supplied, the build system would have looked in a subdirectory of the project called `apps/` rather than `test/apps/` for the source file. Another function currently provided by the DAQ CMake module is `daq_add_unit_test`.
Examples of this function's use can be found at the bottom of the `sourcecode/toylibrary/CMakeLists.txt` file, e.g.: ``` daq_add_unit_test(ValueWrapper_test) ``` -If you pass this function a name, e.g., `MyComponent_test`, it will create a unit test executable off of a source file called `sourcecode//unittest/MyComponent_test.cxx`, and handle linking in the Boost unit test dependencies. You can also optionally have it link in other libraries by providing them after the `LINK_LIBRARIES` argument as in other functions; in the above example, this isn't needed because ValueWrapper is a template class which is instantiated within the unit test code itself. +If you pass this function a name, e.g., `MyComponent_test`, it will create a unit test executable off of a source file called `sourcecode//unittest/MyComponent_test.cxx`, and handle linking in the Boost unit test dependencies. You can also optionally have it link in other libraries by providing them after the `LINK_LIBRARIES` argument as in other functions; in the above example, this isn't needed because ValueWrapper is a template class which is instantiated within the unit test code itself. At the bottom of CMakeLists.txt, you'll see the following function: ``` daq_install() ``` -When you call it it will install the targets (executables, shared object libraries) you wish to make available to others who want to use your package in a directory called `$DBT_INSTALL_DIR/` (by default that would be `./install/toylibrary`). You'll also need to add a special file to your project for this function to work; this is discussed more fully in the "Installing your project as a local package" section later in this document. +When you call it, it will install the targets (executables, shared object libraries) you wish to make available to others who want to use your package in a directory called `$DBT_INSTALL_DIR/` (by default that would be `./install/toylibrary`).
You'll also need to add a special file to your project for this function to work; this is discussed more fully in the "Installing your project as a local package" section later in this document. ### Installing your project as a local package @@ -185,7 +177,7 @@ Use the procedure described below in order to have your package installed. Once find_package(mypackage) ``` -For starters, you'll want to call the DAQ module's `daq_install()` function at the bottom of your CMakeLists.txt file, as described earlier in this document. +For starters, you'll want to call the DAQ module's `daq_install()` function at the bottom of your CMakeLists.txt file, as described earlier in this document. A major thing you should be aware of is that when you call CMake's `find_package` function, it will look for a file with the name `mypackageConfig.cmake` in a predetermined set of directories, including the one you defined (or allowed to default to `./install`) when you initially set up your development area as described elsewhere in the documentation. What a standard `mypackageConfig.cmake` file should look like with modern CMake is documented in many places on the web, but in order to make life as easy as possible there's a templatized version of this file in the daq-cmake package. Assuming you've got a `./sourcecode/mypackage` repo in your development area, you can do the following: ``` @@ -202,26 +194,26 @@ and then let's look at the opening lines of `mypackageConfig.cmake.in`: include(CMakeFindDependencyMacro) -# Insert find_dependency() calls for your package's dependencies in -# the place of this comment. Make sure they match up with the -# find_package calls in your package's CMakeLists.txt file +# Insert find_dependency() calls for your package's dependencies in +# the place of this comment. Make sure they match up with the +# find_package calls in your package's CMakeLists.txt file ``` -The only part of this file you need to worry about is the "Insert find_dependency()..." 
comment. In place of this comment, you'll want to call CMake's `find_dependency` function (details [here](https://cmake.org/cmake/help/latest/module/CMakeFindDependencyMacro.html)) for each package that mypackage depends on; this ensures that developers who call `find_package(mypackage)` don't need to have explicit `find_package` calls on these dependencies. +The only part of this file you need to worry about is the "Insert find_dependency()..." comment. In place of this comment, you'll want to call CMake's `find_dependency` function (details [here](https://cmake.org/cmake/help/latest/module/CMakeFindDependencyMacro.html)) for each package that mypackage depends on; this ensures that developers who call `find_package(mypackage)` don't need to have explicit `find_package` calls on these dependencies. Please note that if you want to _drop_ a dependency from your package, not only should you remove the relevant `find_package` call from `CMakeLists.txt`, you should also remove the corresponding `find_dependency` call in your `Config.cmake.in` file. You can see a simple example of this kind of file with `toylibrary/cmake/toylibraryConfig.cmake.in`. -Once you've edited this file as described, from the base of your development area you can then run +Once you've edited this file as described, from the base of your development area you can then run ``` dbt-build ``` -without receiving an error message informing you that installation isn't an option. +without receiving an error message informing you that installation isn't an option. ## Description of the CMake functions provided by `daq-cmake` ### daq_setup_environment: -Usage: +Usage: ``` daq_setup_environment() @@ -229,8 +221,8 @@ daq_setup_environment() This macro should be called immediately after this DAQ module is included in your DUNE DAQ project's CMakeLists.txt file; it ensures -that DUNE DAQ projects all have a common build environment. It takes -no arguments. 
+that DUNE DAQ projects all have a common build environment. It takes +no arguments. ### daq_codegen: @@ -238,42 +230,42 @@ Usage: ``` daq_codegen( ... [TEST] [DEP_PKGS ...] [MODEL ] [TEMPLATES