From c52b163c1e63ed7639618078b1ad45976beb411c Mon Sep 17 00:00:00 2001 From: Robert Muenzer Date: Fri, 19 Jan 2024 12:06:06 +0100 Subject: [PATCH 001/101] Make nlanes in krypton workflow setable and put default to 36 --- DATA/testing/detectors/TPC/tpc-krypton.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/DATA/testing/detectors/TPC/tpc-krypton.sh b/DATA/testing/detectors/TPC/tpc-krypton.sh index 4d987d602..f7e871611 100755 --- a/DATA/testing/detectors/TPC/tpc-krypton.sh +++ b/DATA/testing/detectors/TPC/tpc-krypton.sh @@ -17,7 +17,7 @@ PROXY_INSPEC="A:TPC/RAWDATA;dd:FLP/DISTSUBTIMEFRAME/0" CALIB_INSPEC="A:TPC/RAWDATA;dd:FLP/DISTSUBTIMEFRAME/0" -NLANES=1 +NLANES=36 SESSION="default" ARGS_FILES="keyval.output_dir=/dev/null" @@ -32,6 +32,12 @@ if [[ ! -z ${TPC_KRYPTON_NO_WRITEOUT:-} ]]; then WRITER_TYPE="--writer-type none" fi + +if [[ ! -z ${TPC_KRYPTON_LANES:-} ]]; then + NLANES=${TPC_KRYPTON_LANES} +fi + + # TODO use add_W function from gen_topo_helper_functions.sh to assemble workflow # as done for example in https://github.com/AliceO2Group/O2DPG/blob/master/DATA/production/calib/its-threshold-processing.sh From 388cc482b4a52db118459a0b611e22839364f9d2 Mon Sep 17 00:00:00 2001 From: Robert Muenzer Date: Fri, 19 Jan 2024 15:02:34 +0100 Subject: [PATCH 002/101] Fix QC inclusion in workflow --- DATA/testing/detectors/TPC/tpc-krypton.sh | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/DATA/testing/detectors/TPC/tpc-krypton.sh b/DATA/testing/detectors/TPC/tpc-krypton.sh index f7e871611..5db7bf581 100755 --- a/DATA/testing/detectors/TPC/tpc-krypton.sh +++ b/DATA/testing/detectors/TPC/tpc-krypton.sh @@ -6,7 +6,6 @@ source common/getCommonArgs.sh source common/gen_topo_helper_functions.sh - export GLOBAL_SHMSIZE=$(( 128 << 30 )) # GB for the global SHMEM # for kr cluster finder if [ $NUMAGPUIDS != 0 ]; then @@ -22,8 +21,8 @@ SESSION="default" ARGS_FILES="keyval.output_dir=/dev/null" -#QC_CONFIG="consul-json://alio2-cr1-hv-con01.cern.ch:8500/o2/components/qc/ANY/any/tpc-krypton-qcmn" -QC_CONFIG="/o2/components/qc/ANY/any/tpc-krypton-qcmn" +QC_CONFIG="consul-json://alio2-cr1-hv-con01.cern.ch:8500/o2/components/qc/ANY/any/tpc-krypton-qcmn" +#QC_CONFIG="/o2/components/qc/ANY/any/tpc-krypton-qcmn" WRITER_TYPE="--writer-type EPN --meta-output-dir $EPN2EOS_METAFILES_DIR --output-dir $CALIB_DIR" @@ -48,7 +47,7 @@ WORKFLOW= add_W o2-dpl-raw-proxy "--dataspec \"$PROXY_INSPEC\" --inject-missing-data --channel-config \"name=readout-proxy,type=pull,method=connect,address=ipc://@tf-builder-pipe-0,transport=shmem,rateLogging=1\"" "" 0 add_W o2-tpc-raw-to-digits-workflow "--ignore-grp --input-spec \"$CALIB_INSPEC\" --remove-duplicates --pipeline tpc-raw-to-digits-0:20 " "\"${ARGS_FILES}\";TPCDigitDump.LastTimeBin=14256" add_W o2-tpc-krypton-clusterer "${WRITER_TYPE} --lanes $NLANES --configFile=\"/home/wiechula/processData/inputFilesTracking/krypton/krBoxCluster.largeBox.cuts.krMap.ini\"" "\"${ARGS_FILES}\"" -add_QC_from_consul "${QC_CONFIG}" "--local --host lcoalhost" +add_W o2-qc "--config $QC_CONFIG --local --host localhost" WORKFLOW+="o2-dpl-run ${ARGS_ALL} ${GLOBALDPLOPT}" if [ $WORKFLOWMODE == "print" ]; then @@ -60,7 +59,7 @@ else eval $WORKFLOW fi -##o2-dpl-raw-proxy $ARGS_ALL \ +#o2-dpl-raw-proxy $ARGS_ALL \ # --dataspec "$PROXY_INSPEC" --inject-missing-data \ # --readout-proxy "--channel-config 'name=readout-proxy,type=pull,method=connect,address=ipc://@tf-builder-pipe-0,transport=shmem,rateLogging=1'" \ # | o2-tpc-raw-to-digits-workflow 
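A minimal sketch of how the two switches touched by these patches can be exercised; the lane value and the standalone invocation are illustrative assumptions (the script normally gets its common settings, e.g. WORKFLOWMODE and NUMAGPUIDS, from the surrounding EPN environment via getCommonArgs.sh):

```bash
# Illustrative only: override the lane count (default now 36) and disable the
# EPN writer before launching the krypton workflow script.
export TPC_KRYPTON_LANES=12        # hypothetical value, replaces the NLANES default
export TPC_KRYPTON_NO_WRITEOUT=1   # any non-empty value here; "1" also satisfies the later stricter check
./DATA/testing/detectors/TPC/tpc-krypton.sh
```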
$ARGS_ALL \ @@ -73,6 +72,6 @@ fi # ${WRITER_TYPE} \ # --lanes $NLANES \ # --configKeyValues "$ARGS_FILES" \ -## --configFile="/home/wiechula/processData/inputFilesTracking/krypton/krBoxCluster.largeBox.cuts.krMap.ini" \ +# --configFile="/home/wiechula/processData/inputFilesTracking/krypton/krBoxCluster.largeBox.cuts.krMap.ini" \ # | o2-qc $ARGS_ALL --config $QC_CONFIG --local --host localhost \ # | o2-dpl-run $ARGS_ALL --dds ${WORKFLOWMODE_FILE} ${GLOBALDPLOPT} From 1b9da5ea32795351deea1bd673b94f1db6c8a312 Mon Sep 17 00:00:00 2001 From: Robert Muenzer Date: Fri, 26 Jan 2024 08:57:21 +0100 Subject: [PATCH 003/101] add setable LANES to krypton raw workflow --- DATA/testing/detectors/TPC/tpc-krypton-raw.sh | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/DATA/testing/detectors/TPC/tpc-krypton-raw.sh b/DATA/testing/detectors/TPC/tpc-krypton-raw.sh index f62b09d57..a502ebf7e 100755 --- a/DATA/testing/detectors/TPC/tpc-krypton-raw.sh +++ b/DATA/testing/detectors/TPC/tpc-krypton-raw.sh @@ -15,8 +15,14 @@ SESSION="default" ARGS_FILES="keyval.output_dir=/dev/null" HOST=localhost -#QC_CONFIG="consul-json://alio2-cr1-hv-con01.cern.ch:8500/o2/components/qc/ANY/any/tpc-krypton-raw-qcmn" -QC_CONFIG="/o2/components/qc/ANY/any/tpc-krypton-raw-qcmn" + + +if [[ ! -z ${TPC_KRYPTON_LANES:-} ]]; then + NLANES=${TPC_KRYPTON_LANES} +fi + +QC_CONFIG="consul-json://alio2-cr1-hv-con01.cern.ch:8500/o2/components/qc/ANY/any/tpc-krypton-raw-qcmn" +#QC_CONFIG="/o2/components/qc/ANY/any/tpc-krypton-raw-qcmn" @@ -35,7 +41,7 @@ WORKFLOW= add_W o2-dpl-raw-proxy "--dataspec \"$PROXY_INSPEC\" --inject-missing-data --channel-config \"name=readout-proxy,type=pull,method=connect,address=ipc://@tf-builder-pipe-0,transport=shmem,rateLogging=1\"" "" 0 add_W o2-tpc-raw-to-digits-workflow "--ignore-grp --input-spec \"$CALIB_INSPEC\" --remove-duplicates --pedestal-url \"http://o2-ccdb.internal\" --pipeline tpc-raw-to-digits-0:24 " "\"${ARGS_FILES}\";TPCDigitDump.LastTimeBin=446" add_W o2-tpc-krypton-raw-filter "${WRITER_TYPE} --lanes $NLANES --threshold-max 20 --time-bins-before 20" "\"${ARGS_FILES}\"" -add_QC_from_consul "${QC_CONFIG}" "--local --host lcoalhost" +add_W o2-qc "--config $QC_CONFIG --local --host localhost" WORKFLOW+="o2-dpl-run ${ARGS_ALL} ${GLOBALDPLOPT}" if [ $WORKFLOWMODE == "print" ]; then From acd6d720a1bb6d0f436d99d970a604b1ef5ef164 Mon Sep 17 00:00:00 2001 From: Robert Muenzer Date: Tue, 30 Jan 2024 22:15:16 +0100 Subject: [PATCH 004/101] Changed the parameter calling for Writeout disabling --- DATA/testing/detectors/TPC/tpc-krypton-raw.sh | 2 +- DATA/testing/detectors/TPC/tpc-krypton.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/DATA/testing/detectors/TPC/tpc-krypton-raw.sh b/DATA/testing/detectors/TPC/tpc-krypton-raw.sh index a502ebf7e..dd92226c4 100755 --- a/DATA/testing/detectors/TPC/tpc-krypton-raw.sh +++ b/DATA/testing/detectors/TPC/tpc-krypton-raw.sh @@ -28,7 +28,7 @@ QC_CONFIG="consul-json://alio2-cr1-hv-con01.cern.ch:8500/o2/components/qc/ANY/an WRITER_TYPE="--writer-type EPN --meta-output-dir $EPN2EOS_METAFILES_DIR --output-dir $CALIB_DIR --max-tf-per-file 8000" -if [[ ! 
-z ${TPC_KRYPTON_NO_WRITEOUT:-} ]]; then +if [[ "${TPC_KRYPTON_NO_WRITEOUT:-}" == "1" ]]; then WRITER_TYPE="--writer-type none" fi diff --git a/DATA/testing/detectors/TPC/tpc-krypton.sh b/DATA/testing/detectors/TPC/tpc-krypton.sh index 5db7bf581..f7649a0fd 100755 --- a/DATA/testing/detectors/TPC/tpc-krypton.sh +++ b/DATA/testing/detectors/TPC/tpc-krypton.sh @@ -27,7 +27,7 @@ QC_CONFIG="consul-json://alio2-cr1-hv-con01.cern.ch:8500/o2/components/qc/ANY/an WRITER_TYPE="--writer-type EPN --meta-output-dir $EPN2EOS_METAFILES_DIR --output-dir $CALIB_DIR" -if [[ ! -z ${TPC_KRYPTON_NO_WRITEOUT:-} ]]; then +if [[ "${TPC_KRYPTON_NO_WRITEOUT:-}" == "1" ]]; then WRITER_TYPE="--writer-type none" fi From 224e43dd6d5c34b739476fca4bc0c1f856209d7c Mon Sep 17 00:00:00 2001 From: Andreas Molander Date: Wed, 31 Jan 2024 13:34:56 +0200 Subject: [PATCH 005/101] Add FT0 QA to analysis QC (#1423) --- .../analysis_testing/json/analyses_config.json | 14 ++++++++++++++ .../json/default/pbpb/analysis-testing-data.json | 3 +++ .../json/default/pbpb/analysis-testing-mc.json | 3 +++ .../json/default/pp/analysis-testing-data.json | 3 +++ .../json/default/pp/analysis-testing-mc.json | 3 +++ 5 files changed, 26 insertions(+) diff --git a/MC/config/analysis_testing/json/analyses_config.json b/MC/config/analysis_testing/json/analyses_config.json index e2721c661..63c67b285 100644 --- a/MC/config/analysis_testing/json/analyses_config.json +++ b/MC/config/analysis_testing/json/analyses_config.json @@ -245,6 +245,20 @@ "o2-analysis-pid-tpc-full", "o2-analysis-pid-tpc-base" ] + }, + { + "name": "FT0QA", + "enabled": true, + "expected_output": ["AnalysisResults.root"], + "valid_mc": true, + "valid_data": true, + "tasks": ["o2-analysis-timestamp", + "o2-analysis-track-propagation", + "o2-analysis-trackselection", + "o2-analysis-event-selection", + "o2-analysis-multiplicity-table", + "o2-analysis-ft0-corrected-table", + "o2-analysis-ft0-qa"] } ] } diff --git a/MC/config/analysis_testing/json/default/pbpb/analysis-testing-data.json b/MC/config/analysis_testing/json/default/pbpb/analysis-testing-data.json index 69a6cd245..3b8c49112 100644 --- a/MC/config/analysis_testing/json/default/pbpb/analysis-testing-data.json +++ b/MC/config/analysis_testing/json/default/pbpb/analysis-testing-data.json @@ -513,5 +513,8 @@ "produceTable": "-1", "ptMax": "1e+10", "ptMin": "0.1" + }, + "ft0-qa": { + "isLowFlux": "false" } } diff --git a/MC/config/analysis_testing/json/default/pbpb/analysis-testing-mc.json b/MC/config/analysis_testing/json/default/pbpb/analysis-testing-mc.json index c97f206f3..33d4017a6 100644 --- a/MC/config/analysis_testing/json/default/pbpb/analysis-testing-mc.json +++ b/MC/config/analysis_testing/json/default/pbpb/analysis-testing-mc.json @@ -1378,5 +1378,8 @@ "produceFBextendedTable": "-1", "ptMax": "1e+10", "ptMin": "0.1" + }, + "ft0-qa": { + "isLowFlux": "false" } } diff --git a/MC/config/analysis_testing/json/default/pp/analysis-testing-data.json b/MC/config/analysis_testing/json/default/pp/analysis-testing-data.json index c754b963c..817d56da9 100644 --- a/MC/config/analysis_testing/json/default/pp/analysis-testing-data.json +++ b/MC/config/analysis_testing/json/default/pp/analysis-testing-data.json @@ -502,5 +502,8 @@ "produceTable": "-1", "ptMax": "1e+10", "ptMin": "0.1" + }, + "ft0-qa": { + "isLowFlux": "true" } } diff --git a/MC/config/analysis_testing/json/default/pp/analysis-testing-mc.json b/MC/config/analysis_testing/json/default/pp/analysis-testing-mc.json index 74e9247e9..4ab7676e0 100644 --- 
a/MC/config/analysis_testing/json/default/pp/analysis-testing-mc.json +++ b/MC/config/analysis_testing/json/default/pp/analysis-testing-mc.json @@ -1377,5 +1377,8 @@ "produceFBextendedTable": "-1", "ptMax": "1e+10", "ptMin": "0.1" + }, + "ft0-qa": { + "isLowFlux": "true" } } From eb3591632fe75ba65ff68353984839c22826a89c Mon Sep 17 00:00:00 2001 From: catalinristea Date: Wed, 31 Jan 2024 17:22:39 +0200 Subject: [PATCH 006/101] Update anchorMC.sh - removing the use of ideal MFT alignment (#1420) * Update anchorMC.sh - removing the use of ideal MFT alignment * Update anchorMC.sh - removed CCDB prefetching --- MC/run/ANCHOR/anchorMC.sh | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index 0892bf906..d0beecc0a 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -212,21 +212,8 @@ fi TIMESTAMP=`grep "Determined timestamp to be" timestampsampling_${ALIEN_JDL_LPMRUNNUMBER}.log | awk '//{print $6}'` echo "TIMESTAMP IS ${TIMESTAMP}" -# -- PREFETCH CCDB OBJECTS TO DISC -- -# (make sure the right objects at the right timestamp are fetched -# until https://alice.its.cern.ch/jira/browse/O2-2852 is fixed) -# NOTE: In fact, looking at the ticket, it should be fixed now. However, not changing at the moment as further tests would be needed to confirm here. - -CCDBOBJECTS="/CTP/Calib/OrbitReset /GLO/Config/GRPMagField/ /GLO/Config/GRPLHCIF /ITS/Calib/DeadMap /ITS/Calib/NoiseMap /ITS/Calib/ClusterDictionary /TPC/Calib/PadGainFull /TPC/Calib/TopologyGain /TPC/Calib/TimeGain /TPC/Calib/PadGainResidual /TPC/Config/FEEPad /TOF/Calib/Diagnostic /TOF/Calib/LHCphase /TOF/Calib/FEELIGHT /TOF/Calib/ChannelCalib /MFT/Calib/DeadMap /MFT/Calib/NoiseMap /MFT/Calib/ClusterDictionary /FV0/Calibration/ChannelTimeOffset" - -${O2_ROOT}/bin/o2-ccdb-downloadccdbfile --host http://alice-ccdb.cern.ch/ -p ${CCDBOBJECTS} -d ${ALICEO2_CCDB_LOCALCACHE} --timestamp ${TIMESTAMP} -if [ ! "$?" == "0" ]; then - echo "Problem during CCDB prefetching of ${CCDBOBJECTS}. Exiting." - exit 1 -fi - -# -- Create aligned geometry using ITS and MFT ideal alignments to avoid overlaps in geant -CCDBOBJECTS_IDEAL_MC="ITS/Calib/Align MFT/Calib/Align" +# -- Create aligned geometry using ITS ideal alignment to avoid overlaps in geant +CCDBOBJECTS_IDEAL_MC="ITS/Calib/Align" TIMESTAMP_IDEAL_MC=1 ${O2_ROOT}/bin/o2-ccdb-downloadccdbfile --host http://alice-ccdb.cern.ch/ -p ${CCDBOBJECTS_IDEAL_MC} -d ${ALICEO2_CCDB_LOCALCACHE} --timestamp ${TIMESTAMP_IDEAL_MC} if [ ! "$?" == "0" ]; then @@ -234,7 +221,7 @@ if [ ! "$?" 
== "0" ]; then exit 1 fi -echo "run with echo in pipe" | ${O2_ROOT}/bin/o2-create-aligned-geometry-workflow --configKeyValues "HBFUtils.startTime=${TIMESTAMP}" --condition-remap=file://${ALICEO2_CCDB_LOCALCACHE}=ITS/Calib/Align,MFT/Calib/Align -b +echo "run with echo in pipe" | ${O2_ROOT}/bin/o2-create-aligned-geometry-workflow --configKeyValues "HBFUtils.startTime=${TIMESTAMP}" --condition-remap=file://${ALICEO2_CCDB_LOCALCACHE}=ITS/Calib/Align -b mkdir -p $ALICEO2_CCDB_LOCALCACHE/GLO/Config/GeometryAligned ln -s -f $PWD/o2sim_geometry-aligned.root $ALICEO2_CCDB_LOCALCACHE/GLO/Config/GeometryAligned/snapshot.root From f7aa7fc4a70889c8f4c177bee72ff8886469bc73 Mon Sep 17 00:00:00 2001 From: swenzel Date: Wed, 31 Jan 2024 15:57:31 +0100 Subject: [PATCH 007/101] Possibility to take external config for Pythia8 In case of generator pythia8, we have so far always constructed a Pythia8 config file from the parameters given to o2dpg_sim_workflow.py However, some expert users may want to use an external configuration for Pythia8. This commit provides the possibility to do so via sensitivity to the `GeneratorPythia8.config` ConfigurableParam (so far ignored). An example is: ``` ${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py -eCM 14000 -col pp -gen pythia8 -proc cdiff -tf 2 \ -ns 20 -e ${SIMENGINE} \ -j ${NWORKERS} -interactionRate 500000 \ -run 302000 -seed 624 \ -confKey "GeneratorPythia8.config=/SOMEPATH/pythia8_powheg.cfg" ``` The new feature allows expert studies with specially setup Pythia8 configs. The development was motivated from https://its.cern.ch/jira/browse/O2-4549 However, note that options `-proc` `-eCM` etc. might have no effect or are ignored in such cases. --- MC/bin/o2dpg_sim_config.py | 2 ++ MC/bin/o2dpg_sim_workflow.py | 48 +++++++++++++++++++++++------------- 2 files changed, 33 insertions(+), 17 deletions(-) diff --git a/MC/bin/o2dpg_sim_config.py b/MC/bin/o2dpg_sim_config.py index d065998e8..22017086d 100755 --- a/MC/bin/o2dpg_sim_config.py +++ b/MC/bin/o2dpg_sim_config.py @@ -97,6 +97,8 @@ def create_geant_config(args, externalConfigString): # creates generic transport simulation config key values # based on arguments args (run number, energy, ...) 
originally passed # to o2dpg_sim_workflow.py + # + # returns a dictionary of mainkey -> dictionary of subkey : values config = {} def add(cfg, flatconfig): for entry in flatconfig: diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index 19a2e9b9a..4fe8781ee 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -21,7 +21,7 @@ import importlib.util import argparse from os import environ, mkdir, getcwd -from os.path import join, dirname, isdir +from os.path import join, dirname, isdir, isabs import random import json import itertools @@ -505,6 +505,9 @@ def getDPL_global_options(bigshm=False, ccdbbackend=True): workflow['stages'].append(TPC_SPACECHARGE_DOWNLOADER_TASK) +# query initial configKey args for signal transport; mainly used to setup generators +simInitialConfigKeys = create_geant_config(args, args.confKey) + # loop over timeframes for tf in range(1, NTIMEFRAMES + 1): TFSEED = SIMSEED + tf @@ -627,19 +630,30 @@ def getDPL_global_options(bigshm=False, ccdbbackend=True): SGN_CONFIG_task=createTask(name='gensgnconf_'+str(tf), tf=tf, cwd=timeframeworkdir) SGN_CONFIG_task['cmd'] = 'echo "placeholder / dummy task"' if GENERATOR == 'pythia8': - SGN_CONFIG_task['cmd'] = '${O2DPG_ROOT}/MC/config/common/pythia8/utils/mkpy8cfg.py \ - --output=pythia8.cfg \ - --seed='+str(TFSEED)+' \ - --idA='+str(PDGA)+' \ - --idB='+str(PDGB)+' \ - --eCM='+str(ECMS)+' \ - --eA='+str(EBEAMA)+' \ - --eB='+str(EBEAMB)+' \ - --process='+str(PROCESS)+' \ - --ptHatMin='+str(PTHATMIN)+' \ - --ptHatMax='+str(PTHATMAX) - if WEIGHTPOW > 0: - SGN_CONFIG_task['cmd'] = SGN_CONFIG_task['cmd'] + ' --weightPow=' + str(WEIGHTPOW) + # see if config is given externally + externalPythia8Config = simInitialConfigKeys.get("GeneratorPythia8", {}).get("config", None) + if externalPythia8Config != None: + # check if this refers to a file with ABSOLUTE path + if not isabs(externalPythia8Config): + print ('Error: Argument to GeneratorPythia8.config must be absolute path') + exit (1) + # in this case, we copy the external config to the local dir (maybe not even necessary) + SGN_CONFIG_task['cmd'] = 'cp ' + externalPythia8Config + ' pythia8.cfg' + else: + SGN_CONFIG_task['cmd'] = '${O2DPG_ROOT}/MC/config/common/pythia8/utils/mkpy8cfg.py \ + --output=pythia8.cfg \ + --seed='+str(TFSEED)+' \ + --idA='+str(PDGA)+' \ + --idB='+str(PDGB)+' \ + --eCM='+str(ECMS)+' \ + --eA='+str(EBEAMA)+' \ + --eB='+str(EBEAMB)+' \ + --process='+str(PROCESS)+' \ + --ptHatMin='+str(PTHATMIN)+' \ + --ptHatMax='+str(PTHATMAX) + if WEIGHTPOW > 0: + SGN_CONFIG_task['cmd'] = SGN_CONFIG_task['cmd'] + ' --weightPow=' + str(WEIGHTPOW) + # if we configure pythia8 here --> we also need to adjust the configuration # TODO: we need a proper config container/manager so as to combine these local configs with external configs etc. args.confKey = args.confKey + ";GeneratorPythia8.config=pythia8.cfg" @@ -647,9 +661,11 @@ def getDPL_global_options(bigshm=False, ccdbbackend=True): # elif GENERATOR == 'extgen': what do we do if generator is not pythia8? 
# NOTE: Generator setup might be handled in a different file or different files (one per # possible generator) - workflow['stages'].append(SGN_CONFIG_task) + # determine final conf key for signal simulation + CONFKEY = constructConfigKeyArg(create_geant_config(args, args.confKey)) + # ----------------- # transport signals # ----------------- @@ -657,8 +673,6 @@ def getDPL_global_options(bigshm=False, ccdbbackend=True): if (args.pregenCollContext == True): signalneeds.append(PreCollContextTask['name']) - # determine final configKey args for signal transport - CONFKEY = constructConfigKeyArg(create_geant_config(args, args.confKey)) # add embedIntoFile only if embeddPattern does contain a '@' embeddinto= "--embedIntoFile ../bkg_MCHeader.root" if (doembedding & ("@" in args.embeddPattern)) else "" From ec4acee8f0d38616b6fa45809661213c9b938acc Mon Sep 17 00:00:00 2001 From: swenzel Date: Sun, 28 Jan 2024 20:40:05 +0100 Subject: [PATCH 008/101] Clean possibly leaked CCDB semaphores at workflow start Use new feature of O2 to scan for leaked CCDB semaphores related to CCDB caches and clean them up before workflow execution. To this end, expand the __global_init__ mechanism with a "cmd" (not just environment variables) field. The pipeline runner will execute such init command before workflows start. Solves a problem, where second run/pass of workflow running hangs due to previously leaked semaphores. --- MC/bin/o2_dpg_workflow_runner.py | 37 +++++++++++++++++++++++++++----- MC/bin/o2dpg_sim_workflow.py | 4 +++- MC/bin/o2dpg_workflow_utils.py | 2 +- 3 files changed, 36 insertions(+), 7 deletions(-) diff --git a/MC/bin/o2_dpg_workflow_runner.py b/MC/bin/o2_dpg_workflow_runner.py index b53e5bf63..44f56303a 100755 --- a/MC/bin/o2_dpg_workflow_runner.py +++ b/MC/bin/o2_dpg_workflow_runner.py @@ -836,11 +836,12 @@ def __init__(self, workflowfile, args, jmax=100): self.is_productionmode = args.production_mode == True # os.getenv("ALIEN_PROC_ID") != None self.workflowfile = workflowfile self.workflowspec = load_json(workflowfile) - self.globalenv = self.extract_global_environment(self.workflowspec) # initialize global environment settings - for e in self.globalenv: + self.globalinit = self.extract_global_environment(self.workflowspec) # initialize global environment settings + for e in self.globalinit['env']: if os.environ.get(e, None) == None: - actionlogger.info("Applying global environment from init section " + str(e) + " : " + str(self.globalenv[e])) - os.environ[e] = str(self.globalenv[e]) + value = self.globalinit['env'][e] + actionlogger.info("Applying global environment from init section " + str(e) + " : " + str(value)) + os.environ[e] = str(value) # only keep those tasks that are necessary to be executed based on user's filters self.workflowspec = filter_workflow(self.workflowspec, args.target_tasks, args.target_labels) @@ -936,13 +937,33 @@ def extract_global_environment(self, workflowspec): """ init_index = 0 # this has to be the first task in the workflow globalenv = {} + initcmd = None if workflowspec['stages'][init_index]['name'] == '__global_init_task__': env = workflowspec['stages'][init_index].get('env', None) if env != None: globalenv = { e : env[e] for e in env } + cmd = workflowspec['stages'][init_index].get('cmd', None) + if cmd != 'NO-COMMAND': + initcmd = cmd + del workflowspec['stages'][init_index] - return globalenv + return {"env" : globalenv, "cmd" : initcmd } + + def execute_globalinit_cmd(self, cmd): + actionlogger.info("Executing global setup cmd " + str(cmd)) + # perform the 
global init command (think of cleanup/setup things to be done in any case) + p = subprocess.Popen(['/bin/bash','-c', cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = p.communicate() + + # Check if the command was successful (return code 0) + if p.returncode == 0: + actionlogger.info(stdout.decode()) + else: + # this should be an error + actionlogger.error("Error executing global init function") + return False + return True def get_global_task_name(self, name): """ @@ -1579,6 +1600,12 @@ def speedup_ROOT_Init(): self.produce_script(args.produce_script) exit (0) + # execute the user-given global init cmd for this workflow + globalinitcmd = self.globalinit.get("cmd", None) + if globalinitcmd != None: + if not self.execute_globalinit_cmd(globalinitcmd): + exit (1) + if args.rerun_from: reruntaskfound=False for task in self.workflowspec['stages']: diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index 4fe8781ee..70dcafb71 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -315,7 +315,9 @@ def extractVertexArgs(configKeyValuesStr, finalDiamondDict): globalenv['ALICEO2_CCDB_LOCALCACHE'] = environ.get('ALICEO2_CCDB_LOCALCACHE') globalenv['IGNORE_VALIDITYCHECK_OF_CCDB_LOCALCACHE'] = '${ALICEO2_CCDB_LOCALCACHE:+"ON"}' -workflow['stages'].append(createGlobalInitTask(globalenv)) +globalinittask = createGlobalInitTask(globalenv) +globalinittask['cmd'] = 'o2-ccdb-cleansemaphores -p ${ALICEO2_CCDB_LOCALCACHE}' +workflow['stages'].append(globalinittask) #### def getDPL_global_options(bigshm=False, ccdbbackend=True): diff --git a/MC/bin/o2dpg_workflow_utils.py b/MC/bin/o2dpg_workflow_utils.py index 304bb8234..a029e8dee 100755 --- a/MC/bin/o2dpg_workflow_utils.py +++ b/MC/bin/o2dpg_workflow_utils.py @@ -114,7 +114,7 @@ def dump_workflow(workflow, filename, meta=None): to_dump = deepcopy(workflow) for s in to_dump: - if s["cmd"] and taskwrapper_string not in s["cmd"]: + if s["cmd"] and s["name"] != '__global_init_task__' and taskwrapper_string not in s["cmd"]: # insert taskwrapper stuff if not there already, only do it if cmd string is not empty s['cmd'] = '. 
' + taskwrapper_string + ' ' + s['name']+'.log \'' + s['cmd'] + '\'' # remove unnecessary whitespaces for better readibility From 1e452045c48b0ff077ddc8073853b54a5e00f3c9 Mon Sep 17 00:00:00 2001 From: ddobrigk Date: Mon, 29 Jan 2024 19:51:12 +0100 Subject: [PATCH 009/101] Update generator_pythia8_LF.C --- MC/config/PWGLF/pythia8/generator_pythia8_LF.C | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MC/config/PWGLF/pythia8/generator_pythia8_LF.C b/MC/config/PWGLF/pythia8/generator_pythia8_LF.C index b5550fb5e..c35c539ad 100644 --- a/MC/config/PWGLF/pythia8/generator_pythia8_LF.C +++ b/MC/config/PWGLF/pythia8/generator_pythia8_LF.C @@ -165,7 +165,7 @@ class GeneratorPythia8LF : public o2::eventgen::GeneratorPythia8 if (mGapBetweenInjection > 0) { if (mGapBetweenInjection == 1 && mEventCounter % 2 == 0) { doSignal = false; - } else if (mEventCounter % mGapBetweenInjection != 0) { + } else if (mEventCounter % mGapBetweenInjection + 1 != 0) { doSignal = false; } } From 94e512ce08d48b7785219eb80d7db90d3be5ae43 Mon Sep 17 00:00:00 2001 From: shahoian Date: Thu, 1 Feb 2024 12:08:53 +0100 Subject: [PATCH 010/101] Enable M-shape correction by defualt if any correction is allowed --- .../configurations/asyncReco/setenv_extra.sh | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index d9bb33d4b..4c95be58a 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -315,6 +315,8 @@ elif [[ $ALIGNLEVEL == 1 ]]; then fi # now we set the options + [[ -n "$ALIEN_JDL_MSHAPE_CORRECTION" && $ALIEN_JDL_MSHAPE_CORRECTION == "0" ]] && ENABLE_MSHAPE=0 || ENABLE_MSHAPE=1 + if [[ $INST_IR_FOR_TPC -gt 0 ]]; then # externally imposed IR for scaling echo "Applying externally provided IR for scaling, $INST_IR_FOR_TPC Hz" export TPC_CORR_SCALING+=";TPCCorrMap.lumiInst=$INST_IR_FOR_TPC" @@ -324,6 +326,7 @@ elif [[ $ALIGNLEVEL == 1 ]]; then elif [[ $INST_IR_FOR_TPC -lt 0 ]]; then # do not apply any correction echo "Passed valued for scaling is smaller than zero, no scaling will be applied" echo "NOTA BENE: In the future, this value will signal to not apply any correction at all, which is not operational yet (but please check, as it depends on O2)" + ENABLE_MSHAPE=0 export TPC_CORR_SCALING+=";TPCCorrMap.lumiInst=$INST_IR_FOR_TPC" elif [[ $INST_IR_FOR_TPC == "CTPCCDB" ]]; then # using what we have in the CCDB CTP counters, extracted at the beginning of the script echo "Using CTP CCDB which gave the mean IR of the run at the beginning of the script ($RUN_IR Hz)" @@ -346,6 +349,10 @@ elif [[ $ALIGNLEVEL == 1 ]]; then return 1 fi + if [[ $ENABLE_MSHAPE == "1" ]]; then + export TPC_CORR_SCALING+=" --enable-M-shape-correction " + fi + if [[ -n $ALIEN_JDL_MEANIRFORTPC && $ALIEN_JDL_MEANIRFORTPC > 0 ]]; then # externally imposed TPC map mean IR for scaling export TPC_CORR_SCALING+=";TPCCorrMap.lumiMean=$ALIEN_JDL_MEANIRFORTPC" fi @@ -363,6 +370,9 @@ elif [[ $ALIGNLEVEL == 1 ]]; then return 1 fi fi + if [[ $ENABLE_MSHAPE == "1" ]]; then + export TPC_CORR_SCALING+=" --enable-M-shape-correction " + fi fi echo "Final setting for TPC scaling is:" From 50ec7dc3ae2ae95327f4787fc7ed74cfaaf4f86d Mon Sep 17 00:00:00 2001 From: swenzel Date: Thu, 1 Feb 2024 13:35:05 +0100 Subject: [PATCH 011/101] platform independence of containerized execution --- GRID/utils/runGRIDContainerized.sh | 26 ++++++++++++++++++++++---- 1 file 
changed, 22 insertions(+), 4 deletions(-) diff --git a/GRID/utils/runGRIDContainerized.sh b/GRID/utils/runGRIDContainerized.sh index ca98f616c..1752f692d 100755 --- a/GRID/utils/runGRIDContainerized.sh +++ b/GRID/utils/runGRIDContainerized.sh @@ -7,13 +7,31 @@ SCRIPT=$1 [ $SCRIPT == "" ] && echo "Please provide a script to run" && exit 1 echo "Trying to run script ${SCRIPT} in a container environment" +# detect architecture (ARM or X86) +ARCH=$(uname -i) +if [ "$ARCH" == "aarch64" ] || [ "$arch" == "x86" ]; then + echo "Detected hardware architecture : $ARCH" +else + echo "Invalid architecture ${ARCH} detected. Exiting" + exit 1 +fi +if [ "$ARCH" == "aarch64" ]; then + ISAARCH64="1" +fi # we just use the default singularity container -APPTAINER_CONTAINER=/cvmfs/alice.cern.ch/containers/fs/singularity/default +APPTAINER_CONTAINER=/cvmfs/alice.cern.ch/containers/fs/singularity/default${ISAARCH64+"-aarch64"} -# create workdir -WORK_DIR=$(mktemp -d /tmp/alien-job-XXXXXX) +# create workdir if not specified externally +if [ ! "${WORK_DIR}" ]; then + WORK_DIR=$(mktemp -d /tmp/alien-job-XXXXXX) +fi echo "This job will be run in $WORK_DIR" +if [ ! -d "${WORK_DIR}" ]; then + echo "working directory ${WORK_DIR} does not exist; Please create before running" + exit 1 +fi + # copy script to WORK_DIR cp ${SCRIPT} ${WORK_DIR}/job.sh @@ -31,5 +49,5 @@ echo "JALIEN_TOKEN_CERT=/workdir/usercert.pem" > ${WORK_DIR}/envfile echo "JALIEN_TOKEN_KEY=/workdir/userkey.pem" >> ${WORK_DIR}/envfile # launch job = script inside the container in the workdir -/cvmfs/alice.cern.ch/containers/bin/apptainer/current/bin/apptainer exec -C -B /cvmfs:/cvmfs,${WORK_DIR}:/workdir \ +/cvmfs/alice.cern.ch/containers/bin/apptainer/current${ISAARCH64+"-aarch64"}/bin/apptainer exec -C -B /cvmfs:/cvmfs,${WORK_DIR}:/workdir \ --pwd /workdir --env-file ${WORK_DIR}/envfile ${APPTAINER_CONTAINER} /workdir/job.sh From 6dcf1448255b35273c1c6355d6a2713f5d9029a9 Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Fri, 2 Feb 2024 14:08:00 +0100 Subject: [PATCH 012/101] TPC QC allow proper init of propagator to avoid error message in FST (#1436) --- DATA/production/qc-sync/tpc.json | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/DATA/production/qc-sync/tpc.json b/DATA/production/qc-sync/tpc.json index 5d0b22997..daabaa239 100644 --- a/DATA/production/qc-sync/tpc.json +++ b/DATA/production/qc-sync/tpc.json @@ -110,6 +110,16 @@ "cutMinNCluster": "60", "cutMindEdxTot": "20." 
}, + "grpGeomRequest" : { + "geomRequest": "Aligned", + "askGRPECS": "false", + "askGRPLHCIF": "false", + "askGRPMagField": "true", + "askMatLUT": "true", + "askTime": "false", + "askOnceAllButField": "true", + "needPropagatorD": "false" + }, "location": "local", "localMachines": [ "localhost", From 319e7f079bdc3caa38069b4c84c4f11742e4827d Mon Sep 17 00:00:00 2001 From: Daniel Samitz <69901155+DanielSamitz@users.noreply.github.com> Date: Fri, 2 Feb 2024 16:39:36 +0100 Subject: [PATCH 013/101] load libpythia6 (#1435) --- MC/config/PWGEM/external/generator/GeneratorEMCocktailV2.C | 1 + 1 file changed, 1 insertion(+) diff --git a/MC/config/PWGEM/external/generator/GeneratorEMCocktailV2.C b/MC/config/PWGEM/external/generator/GeneratorEMCocktailV2.C index 9e2deac45..13770644d 100644 --- a/MC/config/PWGEM/external/generator/GeneratorEMCocktailV2.C +++ b/MC/config/PWGEM/external/generator/GeneratorEMCocktailV2.C @@ -1,4 +1,5 @@ R__ADD_INCLUDE_PATH($O2DPG_ROOT/MC/config/PWGDQ/external/generator) +R__LOAD_LIBRARY(libpythia6) #include "GeneratorCocktail.C" namespace o2 { From 98b4ad04c01a98386578f3b1efbe05f2baf25c93 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Mon, 5 Feb 2024 09:23:19 +0100 Subject: [PATCH 014/101] Documentation and fixes (#1405) * move documentation to https://aliceo2group.github.io/simulation/docs/relval/ * Adding more comments to sources * re-arrange some code blocks with respect to each other * some bug fixes Co-authored-by: Benedikt Volkel --- RelVal/README.md | 173 +--------- RelVal/o2dpg_release_validation.py | 245 +++++++------- RelVal/utils/o2dpg_release_validation_plot.py | 68 +++- .../utils/o2dpg_release_validation_utils.py | 307 ++++++++++++------ 4 files changed, 394 insertions(+), 399 deletions(-) diff --git a/RelVal/README.md b/RelVal/README.md index b2d1c7d2a..680f9a4db 100644 --- a/RelVal/README.md +++ b/RelVal/README.md @@ -1,174 +1,3 @@ # O2DPG ReleaseValidation (RelVal) -The RelVal is specifically designed to compare 2 sets of QC objects. However, it is also possible to compare ROOT files that contain other objects such as histograms (`TH1`) or also `TTree`s: -* ROOT histograms (deriving from `TH1`) -* ROOT `TProfile` -* ROOT `TEfficiency` -* O2 `o2::quality_control::core::MonitorObjectCollection` -* O2 `o2::quality_control::core::MonitorObject` -* ROOT `TTree` (Here the algorithm does its best to extract as many TLeafs as possible which works when they can be drawn with `TTree::Draw`.) - -Objects from compared files are extracted recursively and so all objects in sub-directories are compared. - -The convention is, that only those objects that have the exact same path are compared to one another so the 2 ROOT files must have the same structure. Note though, that all possible pairs are compared. If there are singular objects in one or the other file, they will be safely ignored. - -At the end of this README are some examples for QC RelVal. - -## Quick start - -To jump right in, please check out [this](#run-for-qc) - -## Definitions - -### Metric -A metric is a way to compare 2 corresponding objects and assign a number to that comparison. There are currently 3 definitions: -1. `chi2`: Chi2 test of compared histograms (see also the [ROOT documentation](https://root.cern.ch/doc/master/classTH1.html#ab7d63c7c177ccbf879b5dc31f2311b27)), -1. `kolmogorov`: shape comparison using Kolmogorov test (see also the [ROOT documentation](https://root.cern.ch/doc/master/classTH1.html#aeadcf087afe6ba203bcde124cfabbee4)), -1. 
`num_entries`: relative difference in the number of entries. -So for each pair of histograms there can be multiple metrics. - -### Test -A test is the comparison of a computed metric to certain limits (upper,lower). How these limits came about is the property of such a test. For instance, a simple **threshold** test, where lower is better, would mean to have limits of `(, -infty)`. -There can hence be multiple tests for one metric. - -### Interpretation -A test can be assigned an interpretation. There are -1. `GOOD` if a metric passes a test, -1. `WARNING`: if a **non-critical** metric fails a test, -1. `NONCRIT_NC` if the objects could not be compared e.g. due to different binning or axis ranges **and** if the metric is considered **non-critical**, -1. `CRIT_NC` if the histograms could not be compared e.g. due to different binning or axis ranges **and** if the metric is considered **critical**, -1. `BAD` if a test of a amtric fails that is considered **critical** -1. `UNKNOWN` used for instance when a test might have been defined but no metric was passed to be tested. - -## Usage - -The [Python script](o2dpg_release_validation.py) is the entrypoint of the RelVal and it has multiple sub-commands. - -The full help message of this script can be seen by typing -```bash -${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py [] --help -``` -The wrapper includes 3 different sub-commands for now -1. `rel-val` to steer the RelVal, -1. `inspect` to print histograms of specified severity (if any), -1. `compare` to compare the results of 2 RelVal runs, -1. `print` simply print object names, metric names or test names line-by-line to the command line; convenient to further digest the output, -1. `influx` to convert the summary into a format that can be understood by and sent to an InfluxDB instance. - -Each sub-command can be run with `--help` to see all options/flags. - -### `rel-val` - -If you would like to compare 2 files (or sets of files), simply run -```bash -${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py rel-val -i -j \ - [--include-dirs ] -``` -It will run the full release validation, dumps plots and further artifacts in the directory `rel_val` and prints a result summary in the terminal. -Via the optional `--include-patterns` a list of patterns can be passed so that only those ROOT sub-directories are taken into consideration which contain at least on of those patters, **Note** though, that regular expressions cannot (yet) be used. - -For the comparison of 2 sets of files this is always the first necessary step and of the most important outputs produced is `rel_val/Summary.json` which contains all the test results. It can be used for further and also more in-depth studies as mentioned in the following. - -There are also various plots created during the RelVal run. For each compared file there are -* overlay plots, 1D and 2D (to be found in the sub directory `overlayPlots`), -* 2D plots summarising the interpretations in a grid (called `SummaryTest.png`), -* pie charts showing the fraction of interpretations per metric (and potentially per test, if there are multiple), -* 1D plots showing the computed value and test means per metric (and potentially per test, if there are multiple). - - -### `inspect` -This command requires that a `rel-val` was run previously which produced a `/Summary.json`. - -Imagine you would like to change or experiment with some settings, e.g. you would like to only take objects with certain names into account or only enable certain metrics etc. 
These things you like to see reflected in the summary as well as in the produced plots. -This is possible with -```bash -${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py inspect --path \ - [--include-patterns ] [--exclude-patterns ] \ - [--enable-metric ] [--disable-metric ] \ - [--interpretations ] \ - [--critical ] \ - [--output|-o ] -``` -All of those options, except for `--include-patterns` and `--exclude-patterns` also work with the `rel-val` command. -The output will by default be written to `rel_val_inspect`. All plots which are produced by the `rel-val` command are produced again for a potential given sub-set depending on the given options. Only the overlay plots are not produced again. - -**NOTE** that with `inspect` the original overlay plots satisfying your selection criteria (e.g. `--include-patters` or `--interpretations`) are also copied over to the target directory. - -**Other additional optional arguments** -* `--use-values-as-thresholds []`: By passing a set of summaries that where produced from `rel-val`, the computed metric values can be used as **new** thresholds. To decide how to combine the values for multiple metrics referring to the same object, the option `--combine-thresholds mean|extreme` can be used. Also, an additional relative margin can be added for each metric with `--margin-threshold `; this argument must be repeated for if it should be used for multiple metrics. -* `--regions []`: This computes means and standard deviations for each metric from previously computed values. The corresponding test is passed, if the value lies around the mean within the standard deviations. The deviation from the mean is also given as number-of-sigmas in the summary grid. -* `rel-val -i -j --no-extract` runs RelVal on **flat** ROOT files that have only histogram objects in them. - -### `print` -This command has the same optional arguments as the `inspect` command. But the only thing it does is writing some information line-by-line. For instance, to get the object names that were flagged `BAD` by the `chi2` metric, do -```bash -${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py print --path --enable-metric chi2 --interpretations BAD -``` -If no RelVal was run but one would like to know the available metrics, one can check with -```bash -${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py print --metric-names -``` - -### `influx` - -To convert the final output to something that can be digested by InfluxDB, use -```bash -${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py influx --dir [--tags k1=v1 k2=v2 ...] [--table-name ] -``` -When the `--tags` argument is specified, these are injected as TAGS for InfluxDB in addition. The table name can also be specified explicitly; if not given, it defaults to `O2DPG_MC_ReleaseValidation`. - -## RelVal for QC (examples) - -### Comparing data with MC - -There is an ongoing effort to unify the names of QC objects inside MC and data QC files. Some are already unified and the following command would run comparison of those. However, others are not yet unified and will not be considered in the comparison. - -MC QC objects are usually distributed over multiple files while those from data are all contained in one single file. It is possible to directly compare them with -```bash -${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py rel-val -i ${MC_PRODUCTION}/QC/*.root -j ${DATA_PRODUCTION}/QC.root [--include-dirs ] -``` - -## Run for QC -This is a simple guide to run RelVal for QC. 
- -Here is also a [working example](run/run_data_rel_val.sh), run it with -```bash -${O2DPG_ROOT}/RelVal/run/run_data_rel_val.sh [--qc QC1.root QC2.root ] [--aod AOD1.root AOD2.root] [ --labels LABEL1 LABEL2] -``` - -### If you are interested in all QC plots -To have everything and to use this as a starting point for deeper inspections, first run -```bash -${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py rel-val -i QC_file_1.root -j QC_file_2.root -o rel_val_all [--labels meaningfulLabel1 meaningfulLabel2] -``` -Now, there is of course a lot but from now on you are fully flexible. - -In order to get some insight into a specific detector, say ITS, run -```bash -${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py inspect --path rel_val_all --include-patterns "^ITS_" -o rel_val_ITS -``` -This will only print pie charts and summaries for ITS and also copies all overlay plots related to ITS to your target directory `rel_val_ITS`. - -The `inspect` command is much faster now since no new plots are generated and metrics do not have to be recomputed. It simply filters the results according to your criteria. However, what can be re-evaluated are the computed values against new thresholds. - -### If you are only interested in some ROOT sub-directories to begin with -If you only want to study for instance the ITS and CPV and there is no interest at this point to study any other detector, run -```bash -${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py rel-val -i QC_file_1.root -j QC_file_2.root -o rel_val_all --include-dirs ITS CPV [--labels meaningfulLabel1 meaningfulLabel2] -``` -From here on, you can use the `inspect` command as usual. But there will never be detectors other than ITS and CPV. - -### Troubleshooting - -If there are unexpected segmentation faults or similar, most likely the `QualityControl` software is not properly linked against `O2`. Most likely, the reason is that `QC` was not rebuild against the loaded `O2` version. -The easiest solution would be to load either `QualityControl` or meta packages such as `O2sim`. -Loading like `O2/latest,QualityControl/latest` can cause problems depending on how the single packages were build. - -## Expert section - -### Adding a new metric -A new metric can be added in [ReleaseValidationMetrics.C](ReleaseValidationMetrics.C) by extending the function `void initialiseMetrics(MetricRunner& metricRunner)`. - -## Future plans - -* Store a JSON/JSONs on CCDB for central derivation of more refined thresholds or regions. +Please find the detailed documentation at [https://aliceo2group.github.io/simulation/docs/relval/](https://aliceo2group.github.io/simulation/docs/relval/). 
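With the detailed usage moved to the linked documentation, a short reminder of the basic calls as they were described in the removed README (output directories, labels and the ITS pattern are free choices):

```bash
# Compare two sets of QC/ROOT files, results and plots end up in ./rel_val
${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py rel-val \
    -i QC_file_1.root -j QC_file_2.root -o rel_val --labels batch_i batch_j

# Re-filter an existing RelVal without recomputing metrics, e.g. ITS objects only
${O2DPG_ROOT}/RelVal/o2dpg_release_validation.py inspect \
    --path rel_val --include-patterns "^ITS_" -o rel_val_ITS
```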
diff --git a/RelVal/o2dpg_release_validation.py b/RelVal/o2dpg_release_validation.py index c776bd624..5904a020a 100755 --- a/RelVal/o2dpg_release_validation.py +++ b/RelVal/o2dpg_release_validation.py @@ -11,11 +11,12 @@ import sys import argparse import importlib.util -from os import environ, makedirs, remove, rename +from os import environ, makedirs, remove from os.path import join, abspath, exists, dirname, basename, isfile -from shutil import copy, rmtree import json +import numpy as np + # make sure O2DPG + O2 is loaded O2DPG_ROOT=environ.get('O2DPG_ROOT') @@ -35,13 +36,13 @@ o2dpg_release_validation_utils = importlib.util.module_from_spec(spec) spec.loader.exec_module(o2dpg_release_validation_utils) sys.modules["o2dpg_release_validation_utils"] = o2dpg_release_validation_utils -from o2dpg_release_validation_utils import * +import o2dpg_release_validation_utils as utils spec = importlib.util.spec_from_file_location("o2dpg_release_validation_plot", join(O2DPG_ROOT, "RelVal", "utils", 'o2dpg_release_validation_plot.py')) o2dpg_release_validation_plot = importlib.util.module_from_spec(spec) spec.loader.exec_module(o2dpg_release_validation_plot) sys.modules["o2dpg_release_validation_plot"] = o2dpg_release_validation_plot -from o2dpg_release_validation_plot import plot_pie_charts, plot_summary_grid, plot_compare_summaries, plot_overlays +from o2dpg_release_validation_plot import plot_pie_charts, plot_summary_grid, plot_compare_summaries, plot_overlays, plot_value_histograms ROOT_MACRO_EXTRACT=join(O2DPG_ROOT, "RelVal", "utils", "ExtractAndFlatten.C") @@ -52,45 +53,10 @@ gROOT.SetBatch() -def copy_overlays(rel_val, input_dir, output_dir): - """ - copy overlay plots in this summary from the input directory to the output directory - """ - input_dir = abspath(input_dir) - output_dir = abspath(output_dir) - - if not exists(input_dir): - print(f"ERROR: Input directory {input_dir} does not exist") - return 1 - - inOutSame = input_dir == output_dir - - input_dir_new = input_dir + "_tmp" - if inOutSame: - # move input directory - rename(input_dir, input_dir_new) - input_dir = input_dir_new - - if not exists(output_dir): - makedirs(output_dir) - - object_names, _ = rel_val.get_result_per_metric_and_test() - object_names = list(set(object_names)) - - ret = 0 - for object_name in object_names: - filename=join(input_dir, f"{object_name}.png") - if exists(filename): - copy(filename, output_dir) - else: - print(f"File {filename} not found.") - ret = 1 - - if inOutSame: - rmtree(input_dir) - - return ret +############################################# +# Helper functions only used in this script # +############################################# def metrics_from_root(): """ @@ -100,7 +66,7 @@ def metrics_from_root(): if exists(log_file_name): remove(log_file_name) cmd = f"root -l -b -q {ROOT_MACRO_METRICS}" - ret = run_macro(cmd, log_file_name) + ret = utils.run_macro(cmd, log_file_name) if ret > 0: return ret @@ -116,13 +82,30 @@ def metrics_from_root(): return 0 -def extract(input_filenames, target_filename, include_file_directories=None, add_if_exists=False, reference_extracted=None, json_extracted=None): +def load_from_meta_json(json_path): + """ + Load a meta JSON file and return dictionary + """ + if not exists(json_path): + return None + + with open(json_path, "r") as f: + try: + return json.load(f) + except (json.decoder.JSONDecodeError, UnicodeDecodeError): + pass + return None + + +def extract_and_flatten_impl(input_filenames, target_filename, include_file_directories=None, add_if_exists=False, 
reference_extracted="", json_extracted=""): """ Wrap the extraction of objects to be compared Will be extracted (from TH1, QC objects, TTree etc.), converted to TH1 and put into a flat ROOT file structure. Args: + input_filenames: list + list of input filenames to extract objects from target_filename: str path to file where extracted objects should be saved include_file_directories: iterable or "" (default: "") @@ -132,15 +115,15 @@ def extract(input_filenames, target_filename, include_file_directories=None, add reference_extracted: str is used in case of the extraction of TTrees in which case the x-axis binning will be set according to that reference to make objects comparable. + json_extracted: str + the path to where the JSON file with the info of "what has been extracted where" will be saved + Returns: - bool - True in case of success, False otherwise + bool: True in case of success, False otherwise """ def get_files_from_list(list_filename): """ - Quick helper - - Extract filenames from what is listed in a given file + Quick helper to extract filenames from what is listed in a given file """ collect_files = [] with open(list_filename, "r") as f: @@ -153,13 +136,13 @@ def get_files_from_list(list_filename): include_file_directories = ",".join(include_file_directories) if include_file_directories else "" - # flat ROOT files to extract to and read from during RelVal; make absolute paths so we don't confuse ourselves when running e.g. ROOT macros in different directories - if len(input_filenames) == 1 and input_filenames[0][0] == "@": - input_filenames = get_files_from_list(input_filenames[0][1:]) - if not files1: - print(f"ERROR: Apparently {input_filenames[0][1:]} contains no files to be extracted.") - return None + # if there is only one filename and it starts with "@", assume that it contains the paths of the actual files that should be extracted + read_files_from = input_filenames[0][1:] + input_filenames = get_files_from_list(read_files_from) + if not input_filenames: + print(f"ERROR: Apparently {read_files_from} contains no files to be extracted.") + return False if exists(target_filename) and not add_if_exists: # this file will otherwise be updated if it exists @@ -169,10 +152,6 @@ def get_files_from_list(list_filename): cwd = dirname(target_filename) target_filename = basename(target_filename) log_file_name = join(cwd, f"{target_filename}_extract_and_flatten.log") - if not reference_extracted: - reference_extracted = "" - if not json_extracted: - json_extracted = "" print("Extraction of files") @@ -181,30 +160,23 @@ def get_files_from_list(list_filename): print(f" {f}") cmd = f"\\(\\\"{f}\\\",\\\"{target_filename}\\\",\\\"{reference_extracted}\\\",\\\"{include_file_directories}\\\",\\\"{json_extracted}\\\"\\)" cmd = f"root -l -b -q {ROOT_MACRO_EXTRACT}{cmd}" - ret = run_macro(cmd, log_file_name, cwd) + ret = utils.run_macro(cmd, log_file_name, cwd) if ret != 0: print(f"ERROR: Extracting from file {f} failed. 
Please check logfile {abspath(join(cwd, log_file_name))}") return False return True -def get_extract_json_info(json_path): - - if not exists(json_path): - return None - - with open(json_path, "r") as f: - try: - return json.load(f) - except (json.decoder.JSONDecodeError, UnicodeDecodeError): - pass - return None +def extract_and_flatten(files, output, label, include_directories=None, add_if_exists=False, prefix=None, reference_extracted=""): + """ + Extract from input files to a flat ROOT file -def only_extract_impl(files, output, label, include_directories=None, add_if_exists=False, prefix=None, reference_extracted=None): + Returns the path to a meta JSON and that JSON file loaded as dictionary + """ if len(files) == 1: - d = get_extract_json_info(files[0]) + d = load_from_meta_json(files[0]) if d is not None: return files[0], d @@ -216,7 +188,7 @@ def only_extract_impl(files, output, label, include_directories=None, add_if_exi json_out = abspath(join(output, json_out)) root_out = abspath(join(output, root_out)) - if not extract(files, root_out, include_file_directories=include_directories, add_if_exists=add_if_exists, reference_extracted=reference_extracted, json_extracted=json_out): + if not extract_and_flatten_impl(files, root_out, include_file_directories=include_directories, add_if_exists=add_if_exists, reference_extracted=reference_extracted, json_extracted=json_out): return None, None d = None @@ -231,12 +203,6 @@ def only_extract_impl(files, output, label, include_directories=None, add_if_exi return json_out, d -def only_extract(args): - if not only_extract_impl(args.input, args.output, None, args.label, args.reference): - return 1 - return 0 - - def rel_val_root(d1, d2, metrics_enabled, metrics_disabled, output_dir): """ RelVal for 2 ROOT files, simply a wrapper around ReleaseValidation.C macro @@ -280,7 +246,7 @@ def rel_val_root(d1, d2, metrics_enabled, metrics_disabled, output_dir): output_dir = abspath(output_dir) log_file_rel_val = join(output_dir, "rel_val.log") print("Running RelVal on extracted objects") - ret = run_macro(cmd, log_file_rel_val, cwd=output_dir) + ret = utils.run_macro(cmd, log_file_rel_val, cwd=output_dir) # This comes from the ROOT macro json_path = join(output_dir, "RelVal.json") @@ -309,7 +275,7 @@ def load_rel_val(json_path, include_patterns=None, exclude_patterns=None, enable Returns RelVal """ - rel_val = RelVal() + rel_val = utils.RelVal() rel_val.set_object_name_patterns(include_patterns, exclude_patterns) rel_val.enable_metrics(enable_metrics) rel_val.disable_metrics(disable_metrics) @@ -317,14 +283,14 @@ def load_rel_val(json_path, include_patterns=None, exclude_patterns=None, enable return rel_val -def initialise_evaluator(rel_val, thresholds, thresholds_default, thresholds_margins, thresholds_combine, regions): +def initialise_evaluator(rel_val, thresholds_paths, thresholds_default, thresholds_margins, thresholds_combine, regions_paths): """ Wrapper to create an evaluator Args: rel_val: RelVal the RelVal object that should potentially be tested and is used to derive default threshold - thresholds: iterable or None + thresholds_paths: iterable or None if not None, iterable of string as the paths to RelVal JSONs thresholds_defaults: iterable of 2-tuples or None assign a default threshold value (tuple[1]) to a metric name (tuple[0]) @@ -332,29 +298,44 @@ def initialise_evaluator(rel_val, thresholds, thresholds_default, thresholds_mar add a margin given in percent (tuple[1]) to a threshold value of a metric name (tuple[0]) thresholds_combine: str 
either "mean" or "extreme", how threshold values extracted from argument thresholds should be combined - regions: iterable or None + regions_paths: iterable or None if not None, iterable of string as the paths to RelVal JSONs Returns: Evaluator """ - evaluator = Evaluator() + evaluator = utils.Evaluator() # initialise to run tests on proper mean +- std - if regions: - rel_val_regions = get_summaries_or_from_file(regions) - initialise_regions(evaluator, rel_val_regions) + if regions_paths: + regions = utils.get_paths_or_from_file(regions_paths) + rel_val_regions = utils.RelVal() + rel_val_regions.load(regions) + utils.initialise_regions(evaluator, rel_val_regions) # initialise to run tests on thresholds thresholds_default = {metric_name: float(value) for metric_name, value in thresholds_default} if thresholds_default else None rel_val_thresholds = None - if thresholds: + if thresholds_paths: thresholds_margins = {metric_name: float(value) for metric_name, value in thresholds_margins} if thresholds_margins else None - rel_val_thresholds = get_summaries_or_from_file(thresholds) - initialise_thresholds(evaluator, rel_val, rel_val_thresholds, thresholds_default, thresholds_margins, thresholds_combine) + thresholds_paths = utils.get_paths_or_from_file(thresholds_paths) + rel_val_thresholds = utils.RelVal() + rel_val_thresholds.load(thresholds_paths) + utils.initialise_thresholds(evaluator, rel_val, rel_val_thresholds, thresholds_default, thresholds_margins, thresholds_combine) evaluator.initialise() return evaluator +################################################################### +# Functions that are called after command line has been processed # +################################################################### + + +def only_extract(args): + if not extract_and_flatten(args.input, args.output, None, args.label, args.reference)[0]: + # checking one of the return values for None + return 1 + return 0 + def rel_val(args): """ @@ -364,7 +345,7 @@ def rel_val(args): """ def interpret_results(result, metric): """ - Taking in a result and the metric it was derived from, assign an interpretation + Taking in a result and the corresponding metric it was derived from and assign an interpretation """ is_critical = args.is_critical is None or metric.name in args.is_critical if not metric.comparable and is_critical: @@ -373,13 +354,13 @@ def interpret_results(result, metric): if not metric.comparable: result.interpretation = variables.REL_VAL_INTERPRETATION_NONCRIT_NC return - if result.result_flag == Result.FLAG_UNKNOWN: + if result.result_flag == utils.Result.FLAG_UNKNOWN: result.interpretation = variables.REL_VAL_INTERPRETATION_UNKNOWN return - if result.result_flag == Result.FLAG_PASSED: + if result.result_flag == utils.Result.FLAG_PASSED: result.interpretation = variables.REL_VAL_INTERPRETATION_GOOD return - if result.result_flag == Result.FLAG_FAILED and is_critical: + if result.result_flag == utils.Result.FLAG_FAILED and is_critical: result.interpretation = variables.REL_VAL_INTERPRETATION_BAD return result.interpretation = variables.REL_VAL_INTERPRETATION_WARNING @@ -389,38 +370,42 @@ def interpret_results(result, metric): need_apply = False is_inspect = False - json1 = None - json2 = None + dict_1 = None + dict_2 = None if hasattr(args, "json_path"): # this comes from the inspect command is_inspect = True - json_path = get_summary_path(args.json_path) + json_path = utils.get_summary_path(args.json_path) annotations = None include_patterns, exclude_patterns = (args.include_patterns, 
args.exclude_patterns) else: # in this case, new input ROOT files were provided and we need to apply all our tests need_apply = True + # always take everything include_patterns, exclude_patterns = (None, None) if args.add: print(f"NOTE: Extracted objects will be added to existing ones in case there was already a RelVal at {args.output}.\n") - json1 = only_extract_impl(args.input1, args.output, args.labels[0], args.include_dirs, args.add, prefix="1", reference_extracted=None) - json2 = only_extract_impl(args.input2, args.output, args.labels[1], args.include_dirs, args.add, prefix="2", reference_extracted=json1[1]["path"]) - if None in json1 or None in json2: - print("ERROR: Something went wrong during the extraction") + # each extraction will leave us with a JSON + json_path_1, dict_1 = extract_and_flatten(args.input1, args.output, args.labels[0], args.include_dirs, args.add, prefix="1", reference_extracted=None) + if not json_path_1: + return 1 + json_path_2, dict_2 = extract_and_flatten(args.input2, args.output, args.labels[1], args.include_dirs, args.add, prefix="2", reference_extracted=dict_1["path"]) + if not json_path_2: return 1 - json_path = rel_val_root(json1[1], json2[1], args.enable_metric, args.disable_metric, args.output) + json_path = rel_val_root(dict_1, dict_2, args.enable_metric, args.disable_metric, args.output) if json_path is None: print("ERROR: Problem during RelVal") return 1 - annotations = {"json_path_1": json1[0], - "json_path_2": json2[0]} + annotations = {"json_path_1": json_path_1, + "json_path_2": json_path_2} + # now loading and constructing a RelVal object rel_val = load_rel_val(json_path, include_patterns, exclude_patterns, args.enable_metric, args.disable_metric) if need_apply or args.use_values_as_thresholds or args.default_threshold or args.regions: evaluator = initialise_evaluator(rel_val, args.use_values_as_thresholds, args.default_threshold, args.margin_threshold, args.combine_thresholds, args.regions) - rel_val.apply(evaluator) + rel_val.apply_evaluator(evaluator) # assign interpretations to the results we got rel_val.interpret(interpret_results) @@ -435,7 +420,7 @@ def filter_on_interpretations(result): # if this comes from inspecting, there will be the annotations from the rel-val before that ==> re-write it rel_val.write(join(args.output, "Summary.json"), annotations=annotations or rel_val.annotations[0]) - print_summary(rel_val, variables.REL_VAL_SEVERITIES, long=args.print_long) + utils.print_summary(rel_val, variables.REL_VAL_SEVERITIES, long=args.print_long) if not args.no_plot: print("Now plotting...") @@ -443,21 +428,19 @@ def filter_on_interpretations(result): plot_pie_charts(rel_val, variables.REL_VAL_SEVERITIES, variables.REL_VAL_SEVERITY_COLOR_MAP, args.output) plot_compare_summaries((rel_val,), args.output) plot_summary_grid(rel_val, variables.REL_VAL_SEVERITIES, variables.REL_VAL_SEVERITY_COLOR_MAP, args.output) + plot_value_histograms(rel_val, args.output) if is_inspect: if annotations_inspect := rel_val.annotations: annotations_inspect = annotations_inspect[0] - d1 = get_extract_json_info(annotations_inspect["json_path_1"]) - d2 = get_extract_json_info(annotations_inspect["json_path_2"]) - else: - d1 = json1[1] - d2 = json2[1] + dict_1 = load_from_meta_json(annotations_inspect["json_path_1"]) + dict_2 = load_from_meta_json(annotations_inspect["json_path_2"]) - if d1 and d2: + if dict_1 and dict_2: overlay_plots_out = join(args.output, "overlayPlots") if not exists(overlay_plots_out): makedirs(overlay_plots_out) - plot_overlays(rel_val, 
d1, d2, overlay_plots_out) + plot_overlays(rel_val, dict_1, dict_2, overlay_plots_out) return 0 @@ -473,8 +456,8 @@ def compare(args): output_dir = args.output # load - rel_val1 = load_rel_val(get_summary_path(args.input1[0]), args.include_patterns, args.exclude_patterns, args.enable_metric, args.disable_metric) - rel_val2 = load_rel_val(get_summary_path(args.input2[0]), args.include_patterns, args.exclude_patterns, args.enable_metric, args.disable_metric) + rel_val1 = load_rel_val(utils.get_summary_path(args.input1[0]), args.include_patterns, args.exclude_patterns, args.enable_metric, args.disable_metric) + rel_val2 = load_rel_val(utils.get_summary_path(args.input2[0]), args.include_patterns, args.exclude_patterns, args.enable_metric, args.disable_metric) # get the test and metric names they have in common test_names = np.intersect1d(rel_val1.known_test_names, rel_val2.known_test_names) @@ -490,8 +473,8 @@ def compare(args): if args.interpretations and interpretation not in args.interpretations: continue # object names of Results matching an interpretation - object_names_interpretation1 = object_names1[count_interpretations(results1, interpretation)] - object_names_interpretation2 = object_names2[count_interpretations(results2, interpretation)] + object_names_interpretation1 = object_names1[utils.count_interpretations(results1, interpretation)] + object_names_interpretation2 = object_names2[utils.count_interpretations(results2, interpretation)] # elements in 1 that are not in 2... only_in1 = np.setdiff1d(object_names_interpretation1, object_names_interpretation2) # ...and the other way round @@ -519,7 +502,7 @@ def influx(args): """ Create an influxDB metrics file """ - rel_val = load_rel_val(get_summary_path(args.path)) + rel_val = load_rel_val(utils.get_summary_path(args.path)) output_path = args.path if isfile(args.path) else join(args.path, "influxDB.dat") table_name = "O2DPG_MC_ReleaseValidation" @@ -566,7 +549,7 @@ def print_simple(args): return 0 return metrics_from_root() - rel_val = load_rel_val(get_summary_path(args.path), args.include_patterns, args.exclude_patterns, args.enable_metric, args.disable_metric) + rel_val = load_rel_val(utils.get_summary_path(args.path), args.include_patterns, args.exclude_patterns, args.enable_metric, args.disable_metric) def filter_on_interpretations(result): # only consider those results that match a flag requested by the user @@ -594,14 +577,18 @@ def print_header(): print(f"\n{'#' * 25}\n#{' ' * 23}#\n# RUN ReleaseValidation #\n#{' ' * 23}#\n{'#' * 25}\n") -# we define the parser here +################################################################ +# define the parser globally so that it could even be imported # +################################################################ +# common parser for digesting input files COMMON_FILE_PARSER = argparse.ArgumentParser(add_help=False) COMMON_FILE_PARSER.add_argument("-i", "--input1", nargs="*", help="EITHER first set of input files for comparison OR first input directory from simulation for comparison", required=True) COMMON_FILE_PARSER.add_argument("-j", "--input2", nargs="*", help="EITHER second set of input files for comparison OR second input directory from simulation for comparison", required=True) COMMON_FILE_PARSER.add_argument("--labels", nargs=2, help="labels you want to appear in the plot legends in case of overlay plots from batches -i and -j", default=("batch_i", "batch_j")) COMMON_FILE_PARSER.add_argument("--no-extract", dest="no_extract", action="store_true", help="no extraction but 
immediately expect histograms present for comparison") +# common parser digesting options related to thresholds COMMON_THRESHOLD_PARSER = argparse.ArgumentParser(add_help=False) COMMON_THRESHOLD_PARSER.add_argument("--regions", help="Use calculated regions to test status") COMMON_THRESHOLD_PARSER.add_argument("--default-threshold", dest="default_threshold", action="append", nargs=2) @@ -609,41 +596,53 @@ def print_header(): COMMON_THRESHOLD_PARSER.add_argument("--combine-thresholds", dest="combine_thresholds", choices=["mean", "extreme"], help="Arithmetic mean or extreme value is chosen as threshold", default="mean") COMMON_THRESHOLD_PARSER.add_argument("--margin-threshold", dest="margin_threshold", action="append", nargs=2) +# common parser to digest metric options COMMON_METRIC_PARSER = argparse.ArgumentParser(add_help=False) COMMON_METRIC_PARSER.add_argument("--enable-metric", dest="enable_metric", nargs="*") COMMON_METRIC_PARSER.add_argument("--disable-metric", dest="disable_metric", nargs="*") +# common parser to digest object name patterns COMMON_PATTERN_PARSER = argparse.ArgumentParser(add_help=False) COMMON_PATTERN_PARSER.add_argument("--include-patterns", dest="include_patterns", nargs="*", help="include objects whose name includes at least one of the given patterns (takes precedence)") COMMON_PATTERN_PARSER.add_argument("--exclude-patterns", dest="exclude_patterns", nargs="*", help="exclude objects whose name includes at least one of the given patterns") +# common parser to digest options related to interpretations COMMON_FLAGS_PARSER = argparse.ArgumentParser(add_help=False) COMMON_FLAGS_PARSER.add_argument("--interpretations", nargs="*", help="extract all objects which have at least one test with this severity flag", choices=list(variables.REL_VAL_SEVERITY_MAP.keys())) COMMON_FLAGS_PARSER.add_argument("--is-critical", dest="is_critical", nargs="*", help="set names of metrics that are assumed to be critical") +# common parser to handle verbosity COMMON_VERBOSITY_PARSER = argparse.ArgumentParser(add_help=False) COMMON_VERBOSITY_PARSER.add_argument("--print-long", dest="print_long", action="store_true", help="enhance verbosity") COMMON_VERBOSITY_PARSER.add_argument("--no-plot", dest="no_plot", action="store_true", help="suppress plotting") +# The main parser PARSER = argparse.ArgumentParser(description='Wrapping ReleaseValidation macro') + +# Use various sub-parsers SUB_PARSERS = PARSER.add_subparsers(dest="command") + +# rel-val REL_VAL_PARSER = SUB_PARSERS.add_parser("rel-val", parents=[COMMON_FILE_PARSER, COMMON_METRIC_PARSER, COMMON_THRESHOLD_PARSER, COMMON_FLAGS_PARSER, COMMON_VERBOSITY_PARSER]) REL_VAL_PARSER.add_argument("--include-dirs", dest="include_dirs", nargs="*", help="only include desired directories inside ROOT file; note that each pattern is assumed to start in the top-directory (at the moment no regex or *)") REL_VAL_PARSER.add_argument("--add", action="store_true", help="If given and there is already a RelVal in the output directory, extracted objects will be added to the existing ones") REL_VAL_PARSER.add_argument("--output", "-o", help="output directory", default="rel_val") REL_VAL_PARSER.set_defaults(func=rel_val) +# inspect INSPECT_PARSER = SUB_PARSERS.add_parser("inspect", parents=[COMMON_THRESHOLD_PARSER, COMMON_METRIC_PARSER, COMMON_PATTERN_PARSER, COMMON_FLAGS_PARSER, COMMON_VERBOSITY_PARSER]) INSPECT_PARSER.add_argument("--path", dest="json_path", help="either complete file path to a Summary.json or directory where one of the former is expected to be", 
required=True) INSPECT_PARSER.add_argument("--output", "-o", help="output directory", default="rel_val_inspect") INSPECT_PARSER.set_defaults(func=rel_val) +# compare COMPARE_PARSER = SUB_PARSERS.add_parser("compare", parents=[COMMON_FILE_PARSER, COMMON_PATTERN_PARSER, COMMON_METRIC_PARSER, COMMON_VERBOSITY_PARSER, COMMON_FLAGS_PARSER]) COMPARE_PARSER.add_argument("--output", "-o", help="output directory", default="rel_val_comparison") COMPARE_PARSER.add_argument("--difference", action="store_true", help="plot histograms with different severity") COMPARE_PARSER.add_argument("--plot", action="store_true", help="plot value and threshold comparisons of RelVals") COMPARE_PARSER.set_defaults(func=compare) +# influx INFLUX_PARSER = SUB_PARSERS.add_parser("influx") INFLUX_PARSER.add_argument("--path", help="directory where ReleaseValidation was run", required=True) INFLUX_PARSER.add_argument("--tags", nargs="*", help="tags to be added for influx, list of key=value") @@ -651,6 +650,7 @@ def print_header(): INFLUX_PARSER.add_argument("--output", "-o", help="output path; if not given, a file influxDB.dat is places inside the RelVal directory") INFLUX_PARSER.set_defaults(func=influx) +# print PRINT_PARSER = SUB_PARSERS.add_parser("print", parents=[COMMON_METRIC_PARSER, COMMON_PATTERN_PARSER, COMMON_FLAGS_PARSER]) PRINT_PARSER.add_argument("--path", help="either complete file path to a Summary.json or directory where one of the former is expected to be") PRINT_PARSER.add_argument("--metric-names", dest="metric_names", action="store_true") @@ -658,6 +658,7 @@ def print_header(): PRINT_PARSER.add_argument("--object-names", dest="object_names", action="store_true") PRINT_PARSER.set_defaults(func=print_simple) +# extract EXTRACT_PARSER = SUB_PARSERS.add_parser("extract", parents=[COMMON_VERBOSITY_PARSER]) EXTRACT_PARSER.add_argument("--input", nargs="*", help="Set of input files to be extracted", required=True) EXTRACT_PARSER.add_argument("--output", "-o", help="output directory", default="rel_val_extracted") diff --git a/RelVal/utils/o2dpg_release_validation_plot.py b/RelVal/utils/o2dpg_release_validation_plot.py index 17b774dab..562f5446d 100755 --- a/RelVal/utils/o2dpg_release_validation_plot.py +++ b/RelVal/utils/o2dpg_release_validation_plot.py @@ -7,11 +7,11 @@ from os import environ import importlib.util from itertools import product -import re import numpy as np import matplotlib.pyplot as plt from matplotlib.colors import LinearSegmentedColormap import seaborn +from scipy.stats import iqr O2DPG_ROOT = environ.get("O2DPG_ROOT") @@ -27,13 +27,19 @@ sys.modules["o2dpg_release_validation_plot_root"] = o2dpg_release_validation_plot_root from o2dpg_release_validation_plot_root import plot_overlays_root, plot_overlays_root_no_rel_val + def plot_pie_charts(rel_val, interpretations, interpretation_colors, out_dir, title="", get_figure=False): + """ + Plot pie charts per metric and test + Each pie chart shows the ratio of given interpretations + """ print("==> Plot pie charts <==") for metric_name, test_name in product(rel_val.known_metrics, rel_val.known_test_names): figure, ax = plt.subplots(figsize=(20, 20)) - colors = [] + # collect counts of interpretations, their colours and labels counts = [] + colors = [] labels = [] object_names, results = rel_val.get_result_per_metric_and_test(metric_name, test_name) @@ -60,7 +66,47 @@ def plot_pie_charts(rel_val, interpretations, interpretation_colors, out_dir, ti plt.close(figure) +def plot_value_histograms(rel_val, out_dir, title="values histogram", 
get_figure=False): + """ + Plot a histogram of metric values + """ + + print("==> Plot value histograms <==") + for metric_name in rel_val.known_metrics: + figure, ax = plt.subplots(figsize=(20, 20)) + values = [] + for _, _, metric in zip(*rel_val.get_metrics(metric_name=metric_name)): + if not metric.comparable: + continue + values.append(metric.value) + + if not values: + continue + + ax.set_xlabel(metric_name, fontsize=20) + ax.set_ylabel("counts", fontsize=20) + ax.hist(values, bins=100) + ax.tick_params("both", labelsize=20) + figure.tight_layout() + + figure.suptitle(f"{title} (metric: {metric_name})", fontsize=40) + save_path = join(out_dir, f"histogram_values_{metric_name}.png") + figure.savefig(save_path) + if get_figure: + return figure + plt.close(figure) + + def plot_summary_grid(rel_val, interpretations, interpretation_colors, output_dir, get_figure=False): + """ + Plot a summary grid per test. + + horizontal axis: metric names + vertical axis: object names + + Each cell is coloured according to an interpretation. + In addition, the cells contain the computed metric values + """ print("==> Plot summary grid <==") @@ -74,12 +120,15 @@ def plot_summary_grid(rel_val, interpretations, interpretation_colors, output_di for nt in range(rel_val.number_of_tests): metric_names, object_names, results_matrix = rel_val.get_result_matrix_objects_metrics(nt) - arr = np.full(results_matrix.shape, 0, dtype=int) + # make an array where each interpretation is mapped to a numerical value + arr_interpretation = np.full(results_matrix.shape, 0, dtype=int) + # collect annotations for each cell arr_annot = np.full(results_matrix.shape, "", dtype=object) + # iterate over the cells and set values and annotations it = np.nditer(results_matrix, flags=['multi_index', "refs_ok"]) for _ in it: result = results_matrix[it.multi_index] - arr[it.multi_index] = interpretation_name_to_number[result.interpretation] + arr_interpretation[it.multi_index] = interpretation_name_to_number[result.interpretation] if result.value is not None: annot = f"{result.value:.3f} (mean: {result.mean:.3f})" if result.n_sigmas is not None: @@ -89,8 +138,9 @@ def plot_summary_grid(rel_val, interpretations, interpretation_colors, output_di arr_annot[it.multi_index] = annot + #now comes the plotting figure, ax = plt.subplots(figsize=(20, 20)) - seaborn.heatmap(arr, ax=ax, cmap=cmap, vmin=-0.5, vmax=len(interpretations) - 0.5, yticklabels=object_names, xticklabels=metric_names, linewidths=0.5, annot=arr_annot, fmt="") + seaborn.heatmap(arr_interpretation, ax=ax, cmap=cmap, vmin=-0.5, vmax=len(interpretations) - 0.5, yticklabels=object_names, xticklabels=metric_names, linewidths=0.5, annot=arr_annot, fmt="") cbar = ax.collections[0].colorbar cbar.set_ticks(range(len(colors))) cbar.set_ticklabels(interpretations) @@ -111,7 +161,7 @@ def plot_summary_grid(rel_val, interpretations, interpretation_colors, output_di def plot_compare_summaries(rel_vals, out_dir, *, labels=None, get_figure=False): """ - if labels is given, it needs to have the same length as summaries + Plot the metric values for each object. 
""" print("==> Plot metric values <==") @@ -156,10 +206,16 @@ def plot_compare_summaries(rel_vals, out_dir, *, labels=None, get_figure=False): def plot_overlays(rel_val, file_config_map1, file_config_map2, out_dir, plot_regex=None): + """ + Wrapper around ROOT overlay plotting + """ print("==> Plot overlays <==") plot_overlays_root(rel_val, file_config_map1, file_config_map2, out_dir, plot_regex) def plot_overlays_no_rel_val(file_configs, out_dir): + """ + Wrapper around ROOT plotting when no RelVal object is given + """ print("==> Plot overlays <==") plot_overlays_root_no_rel_val(file_configs, out_dir) diff --git a/RelVal/utils/o2dpg_release_validation_utils.py b/RelVal/utils/o2dpg_release_validation_utils.py index 873d4de44..633e424c6 100755 --- a/RelVal/utils/o2dpg_release_validation_utils.py +++ b/RelVal/utils/o2dpg_release_validation_utils.py @@ -3,7 +3,9 @@ # Definition of common functionality import re -from os.path import join, exists, isdir +from os.path import join, exists, isdir, abspath +from os import makedirs, rename +from shutil import rmtree, copy from itertools import product from subprocess import Popen, PIPE, STDOUT from shlex import split @@ -11,20 +13,6 @@ import numpy as np -def remove_outliers(data, m=6.): - """ - Helper to remove outliers from a list of floats - """ - if not data: - return None, None - data = np.array(data) - d = np.abs(data - np.median(data)) - mdev = np.median(d) - s = d / (mdev if mdev else 1.) - print(s) - return data[s < m], data[s >= m] - - def default_evaluation(limits): """ Return a lambda f(value) -> bool @@ -34,10 +22,10 @@ def default_evaluation(limits): if limits[0] is None and limits[1] is None: return lambda x: None if limits[0] is not None and limits[1] is None: - return lambda x: x > limits[0] + return lambda x: x >= limits[0] if limits[0] is None and limits[1] is not None: - return lambda x: x < limits[1] - return lambda x: limits[0] < x < limits[1] + return lambda x: x <= limits[1] + return lambda x: limits[0] <= x <= limits[1] def compute_limits(mean, std): @@ -106,6 +94,9 @@ def __init__(self, object_name=None, name=None, value=None, proposed_threshold=N if in_dict is not None: self.from_dict(in_dict) + def __eq__(self, other): + return self.object_name == other.object_name and self.name == other.name + def as_dict(self): return {"object_name": self.object_name, "metric_name": self.name, @@ -190,36 +181,6 @@ def initialise(self): self.metric_names = np.array(self.metric_names, dtype=str) self.test_names = np.array(self.test_names, dtype=str) self.tests = np.array(self.tests, dtype=TestLimits) - - # fill up tests - # The following guarantees that we have all metrics and all tests for the object names - # NOTE Probably there is a more elegant way?! 
- test_names_known = np.unique(self.test_names) - metric_names_known = np.unique(self.metric_names) - object_names_known = np.unique(self.object_names) - - object_names_to_add = [] - metric_names_to_add = [] - test_names_to_add = [] - - for object_name, metric_name in product(object_names_known, metric_names_known): - mask = (self.object_names == object_name) & (self.metric_names == metric_name) - if not np.any(mask): - object_names_to_add.extend([object_name] * len(test_names_known)) - metric_names_to_add.extend([metric_name] * len(test_names_known)) - test_names_to_add.extend(test_names_known) - continue - present_test_names = self.test_names[mask] - test_names_not_present = test_names_known[~np.isin(present_test_names, test_names_known)] - test_names_to_add.extend(test_names_not_present) - metric_names_to_add.extend([metric_name] * len(test_names_not_present)) - object_names_to_add.extend([object_name] * len(test_names_not_present)) - - self.object_names = np.array(np.append(self.object_names, object_names_to_add)) - self.metric_names = np.array(np.append(self.metric_names, metric_names_to_add)) - self.test_names = np.array(np.append(self.test_names, test_names_to_add)) - self.tests = np.array(np.append(self.tests, [TestLimits(tnta) for tnta in test_names_to_add])) - self.mask_any = np.full(self.test_names.shape, True) def test(self, metrics): @@ -253,42 +214,51 @@ class RelVal: def __init__(self): # metric names that should be considered (if empty, all) self.include_metrics = [] + # metric names that should be excluded, takes precedence over self.include_metrics self.exclude_metrics = [] - # lists of regex to include/exclude objects by name + # lists of regex to include/exclude objects by their names self.include_patterns = None self.exclude_patterns = None - # collecting everything we have; all of the following will have the same length in the end + # collecting everything we have; the following three members will have the same length self.object_names = None self.metric_names = None # metric objects self.metrics = None - # object and metric names known to this RelVal + # unique object and metric names self.known_objects = None self.known_metrics = None - # collecting all results; all of the following will have the same length in the end + # collecting all results; the following three members will have the same length self.results = None - # indices to refer to self.object_names, self.metric_names and self.metrics + # each index refers to the corresponding object in self.object_names, self.metric_names and self.metrics self.results_to_metrics_idx = None + + # unique list of test names self.known_test_names = None # to store some annotations self.annotations = None - def enable_metrics(self, metrics): - if not metrics: + def enable_metrics(self, metric_names): + """ + Enable a list of metrics given their names + """ + if not metric_names: return - for metric in metrics: + for metric in metric_names: if metric in self.include_metrics: continue self.include_metrics.append(metric) - def disable_metrics(self, metrics): - if not metrics: + def disable_metrics(self, metric_names): + """ + Disable a list of metrics given their names + """ + if not metric_names: return - for metric in metrics: + for metric in metric_names: if metric in self.exclude_metrics: continue self.exclude_metrics.append(metric) @@ -367,29 +337,25 @@ def add_result(self, metric_idx, result): self.results_to_metrics_idx.append(metric_idx) self.results.append(result) - def load(self, summaries_to_test): + def 
get_metric_checking_dict(self, in_dict): + """ + Check if that metric is already known + """ + if self.metrics is None: + return None, None - self.annotations = [] - self.object_names = [] - self.metric_names = [] - self.metrics = [] - self.results_to_metrics_idx = [] - self.results = [] + metric = Metric(in_dict=in_dict) - for summary_to_test in summaries_to_test: - summary_to_test = self.read(summary_to_test) - if annotations := summary_to_test.get(RelVal.KEY_ANNOTATIONS, None): - self.annotations.append(annotations) - for line in summary_to_test[RelVal.KEY_OBJECTS]: - metric = Metric(in_dict=line) - if not self.add_metric(metric): - continue + for idx, search_metric in enumerate(self.metrics): + if metric == search_metric: + return idx, search_metric - if "result_name" in line: - # NOTE We could think about not duplicating metrics. - # Because there is the same metric for each of the corresponding test results - self.add_result(len(self.metrics) - 1, Result(in_dict=line)) + return None, metric + def to_numpy(self): + """ + Convert everything that is a list to numpy for faster querying later on + """ self.known_objects = np.unique(self.object_names) self.known_metrics = np.unique(self.metric_names) @@ -402,8 +368,42 @@ def load(self, summaries_to_test): self.results_to_metrics_idx = np.array(self.results_to_metrics_idx, dtype=int) if self.results else None self.test_names_results = np.array([r.name for r in self.results]) if self.results else None self.known_test_names = np.unique(self.test_names_results) if self.results else None - self.result_filter_mask = np.full(self.known_test_names.shape, True) if self.results else None self.results = np.array(self.results, dtype=Result) if self.results else None + self.result_filter_mask = np.full(self.results.shape, True) if self.results is not None else None + + def load(self, summaries_to_test): + """ + Loads and populates this object from a dictionary + """ + self.annotations = [] + self.object_names = [] + self.metric_names = [] + self.metrics = [] + self.results_to_metrics_idx = [] + self.results = [] + + for summary_to_test in summaries_to_test: + # loop over the list of dictionaries given + summary_to_test = self.read(summary_to_test) + if annotations := summary_to_test.get(RelVal.KEY_ANNOTATIONS, None): + self.annotations.append(annotations) + for line in summary_to_test[RelVal.KEY_OBJECTS]: + # each list object corresponds to and object with a certain test result + # first of all we check if that metric is already loaded + idx, metric = self.get_metric_checking_dict(line) + if idx is None: + # in this case, this metric is new + idx = len(self.metrics) - 1 + if not self.add_metric(metric): + # only attempt to add if that metric is not yet there + continue + + if "result_name" in line: + # add this result; the result will be mapped to the metric it is based on via the index + self.add_result(idx, Result(in_dict=line)) + + # convert everything that was a list before to numpy objects + self.to_numpy() def get_metrics(self, object_name=None, metric_name=None): """ @@ -422,7 +422,7 @@ def get_metrics(self, object_name=None, metric_name=None): mask = mask & (self.any_mask if metric_name is None else np.isin(self.metric_names, metric_name)) return self.object_names[mask], self.metric_names[mask], self.metrics[mask] - def apply(self, evaluator): + def apply_evaluator(self, evaluator): """ Apply loaded tests """ @@ -438,20 +438,32 @@ def apply(self, evaluator): self.results_to_metrics_idx, self.results = evaluator.test(self.metrics) 
self.test_names_results = np.array([r.name for r in self.results]) self.known_test_names = np.unique(self.test_names_results) - self.result_filter_mask = np.full(self.known_test_names.shape, True) + self.result_filter_mask = np.full(self.results.shape, True) def interpret(self, interpret_func): + """ + Add an interpretation to the Result objects based on a function given by the user + """ for metric_idx, result in zip(self.results_to_metrics_idx, self.results): interpret_func(result, self.metrics[metric_idx]) def filter_results(self, filter_func): + """ + Construct a mask to filter results without losing any of them + """ if self.results is None: return self.result_filter_mask = [filter_func(result) for result in self.results] def query_results(self, query_func=None): + """ + Query Result objects based on a function given by the user + + Return matching Result objects along with names + """ mask = np.array([query_func is None or query_func(result) for result in enumerate(self.results)]) - mask = mask & self.result_filter_mask + if self.result_filter_mask is not None: + mask = mask & self.result_filter_mask idx = self.results_to_metrics_idx[mask] return np.take(self.object_names, idx), np.take(self.metric_names, idx), self.test_names_results[idx], self.results[idx] @@ -474,15 +486,28 @@ def get_metric_name(self, idx): return self.known_metrics[idx] def get_result_per_metric_and_test(self, metric_index_or_name=None, test_index_or_name=None): + """ + Return Result objects that belong to given metric or test + """ test_name = test_index_or_name if (isinstance(test_index_or_name, str) or test_index_or_name is None) else self.known_test_names[test_index_or_name] metric_name = metric_index_or_name if (isinstance(metric_index_or_name, str) or metric_index_or_name is None) else self.known_metrics[metric_index_or_name] metric_idx = np.argwhere(self.metric_names == metric_name) if metric_name is not None else self.results_to_metrics_idx - mask = np.isin(self.results_to_metrics_idx, metric_idx) & self.result_filter_mask + mask = np.isin(self.results_to_metrics_idx, metric_idx) + if self.result_filter_mask is not None: + mask = mask & self.result_filter_mask if test_name is not None: mask = mask & (self.test_names_results == test_name) return np.take(self.object_names, self.results_to_metrics_idx[mask]), self.results[mask] def get_result_matrix_objects_metrics(self, test_index): + """ + Return a matrix of Result objects + + vertical axis: object names + horizontal axis: metric names + + in addition return metric and object names so the user knows what she gets + """ mask = self.test_names_results == (self.known_test_names[test_index]) idx = self.results_to_metrics_idx[mask] results = self.results[mask] @@ -497,9 +522,15 @@ def get_result_matrix_objects_metrics(self, test_index): return metric_names, object_names, np.reshape(results[idx], (len(object_names), len(metric_names))) def yield_metrics_results_per_object(self): + """ + One-by-one return metrics and results of objects + """ results = None if self.results is not None: - mask = self.result_filter_mask + if self.result_filter_mask is not None: + mask = self.result_filter_mask + else: + mask = np.full(self.results.shape, True) idx = self.results_to_metrics_idx[mask] object_names = np.take(self.object_names, idx) metrics = np.take(self.metrics, idx) @@ -515,10 +546,13 @@ def yield_metrics_results_per_object(self): yield object_name, yield_metrics, yield_results def write(self, filepath, annotations=None): + """ + Write everything to a JSON file + 
Structure corresponds to what ROOT's RelVal returns so in turn it can be used to construct a RelVal object again + """ all_objects = [] - # TODO return one flat dictionary not a nested one def make_dict_include_results(object_name, metric, result): return {RelVal.KEY_OBJECT_NAME: object_name} | metric.as_dict() | result.as_dict() @@ -528,7 +562,7 @@ def make_dict_exclude_results(object_name, metric, *args): if self.results is None: object_names = self.object_names metrics = self.metrics - results = np.empty(metric.shape, dtype=bool) + results = np.empty(metrics.shape, dtype=bool) make_dict = make_dict_exclude_results else: object_names = np.take(self.object_names, self.results_to_metrics_idx) @@ -546,20 +580,26 @@ def make_dict_exclude_results(object_name, metric, *args): json.dump(final_dict, f, indent=2) -def get_summaries_or_from_file(in_objects): - - if len(in_objects) == 1 and in_objects[0].startswith("@"): - with open(in_objects[0][1:], "r") as f: +def get_paths_or_from_file(paths): + """ + Either simply return the paths or extract them from a text file + """ + if len(paths) == 1 and paths[0].startswith("@"): + with open(paths[0][1:], "r") as f: return f.read().splitlines() - return in_objects + return paths def initialise_thresholds(evaluator, rel_val, rel_val_thresholds, thresholds_default, thresholds_margin, thresholds_combine="mean"): - + """ + Add thresholds to the Evaluator as one test case + """ # The default thresholds will be derived and set for all the objects and metrics that we find in the RelVal to test _, _, metrics = rel_val.get_metrics() for metric in metrics: + # get the default thresholds for each metric proposed_threshold = thresholds_default.get(metric.name, metric.proposed_threshold) if thresholds_default else metric.proposed_threshold + # depending on what's better (lower/greater), set the std boundaries std = (None, 0) if metric.lower_is_better else (0, None) evaluator.add_limits(metric.object_name, metric.name, TestLimits("threshold_default", proposed_threshold, std)) @@ -569,56 +609,74 @@ def initialise_thresholds(evaluator, rel_val, rel_val_thresholds, thresholds_def for object_name in rel_val_thresholds.known_objects: for metric_name in rel_val_thresholds.known_metrics: + # get metric for given objects by name _, _, metrics = rel_val_thresholds.get_metrics((object_name,), (metric_name,)) + if not np.any(metrics): continue + # collect all values from all metrics for this object values = [m.value for m in metrics if m.comparable] + # check what is better, lower or greater lower_is_better = metrics[0].lower_is_better factor = 1 if lower_is_better else -1 if not values: + evaluator.add_limits(object_name, metric_name, TestLimits("threshold_user")) continue if thresholds_combine == "mean": + # combine the values, by default take the mean as the threshold mean_central = np.mean(values) else: + # otherwise take the extremum mean_central = factor * max([factor * v for v in values]) + margin = thresholds_margin[metric_name] * mean_central if thresholds_margin and metric_name in thresholds_margin else 0 + # put together the std limits and add the TestLimits to the Evaluator if factor > 0: low = None - up = (1 + thresholds_margin[metric_name]) * mean_central + up = margin else: up = None - low = (1 - thresholds_margin) * mean_central + low = margin evaluator.add_limits(object_name, metric_name, TestLimits("threshold_user", mean_central, (low, up))) -def initialise_regions(evaluator, regions): - rel_val_regions = RelVal() - rel_val_regions.load(regions) +def 
initialise_regions(evaluator, rel_val_regions): + """ + Add regions to the Evaluator as test case + """ + # Loop through everything for object_name in rel_val_regions.known_objects: for metric_name in rel_val_regions.known_metrics: _, _, metrics = rel_val_regions.get_metrics((object_name,), (metric_name,)) + # get all the metric values for the given object and a particular metric values = [m.value for m in metrics if m.comparable] + # extract some properties of the metrics that need to be known proposed_threshold = metrics[0].proposed_threshold lower_is_better = metrics[0].lower_is_better + # a list of metric values where outliers are removed values_central = [] + # a list of metric values with only outliers values_outlier = [] for v in values: diff = v - proposed_threshold if (diff < 0 and lower_is_better) or (diff > 0 and not lower_is_better): - # if the value is below and lower is better (or the other way round), then accept it + # if the value is below and lower is better (or the other way round), then accept it because it is definitely better than even the proposed threshold values_central.append(v) continue if diff != 0: + # check how far off the calculated difference is from the proposed value diff = abs(proposed_threshold / diff) if diff < 0.1: - # this means we accept up to an order of magnitude + # this means we accept up to an order of magnitude, this is hence an outlier values_outlier.append(v) continue + # if this is reached, the value is worse than the proposed threshold but only by less than one order of magnitude values_central.append(v) + # now get the means of this region with their std for both central and outliers mean_central = np.mean(values_central) std_central = np.std(values_central) if np.any(values_outlier): @@ -627,16 +685,22 @@ def initialise_regions(evaluator, regions): else: mean_outlier = None std_outlier = None + # add these mean and std values as two different test limits evaluator.add_limits(object_name, metric_name, TestLimits("regions_tight", mean_central, (std_central, std_central))) evaluator.add_limits(object_name, metric_name, TestLimits("regions_loose", mean_outlier, (std_outlier, std_outlier))) def run_macro(cmd, log_file, cwd=None): + """ + Wrapper to run a command line + """ p = Popen(split(cmd), cwd=cwd, stdout=PIPE, stderr=STDOUT, universal_newlines=True) + # open a logfile and write to it line by line log_file = open(log_file, 'a') for line in p.stdout: log_file.write(line) p.wait() + # when done, close the logfile and return the cmd's return code log_file.close() return p.returncode @@ -669,9 +733,54 @@ def print_summary(rel_val, interpretations, long=False): def get_summary_path(path): + """ + Get the full path to Summary.json + + If a directory is given, look for the file inside + """ if isdir(path): path = join(path, "Summary.json") if exists(path): return path print(f"ERROR: Cannot neither find {path}.") return None + + +def copy_overlays(rel_val, input_dir, output_dir): + """ + copy overlay plots in this summary from the input directory to the output directory + """ + input_dir = abspath(input_dir) + output_dir = abspath(output_dir) + + if not exists(input_dir): + print(f"ERROR: Input directory {input_dir} does not exist") + return 1 + + in_out_same = input_dir == output_dir + + input_dir_new = input_dir + "_tmp" + if in_out_same: + # move input directory + rename(input_dir, input_dir_new) + input_dir = input_dir_new + + if not exists(output_dir): + makedirs(output_dir) + + object_names, _ = rel_val.get_result_per_metric_and_test() + 
object_names = list(set(object_names)) + + ret = 0 + for object_name in object_names: + filename=join(input_dir, f"{object_name}.png") + if exists(filename): + copy(filename, output_dir) + else: + print(f"File {filename} not found.") + ret = 1 + + if in_out_same: + rmtree(input_dir) + + return ret From 7732510ecf82fb0b42012b9ca1cb9ccfee793cd5 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Mon, 5 Feb 2024 12:24:44 +0100 Subject: [PATCH 015/101] [SimWF] Comments for anchored WF script (#1437) * remove `currenttime` and use `timestamp` everywhere * add comments in determine_timestamp, especially making clear where seconds, milliseconds or microseconds are used --- MC/bin/o2dpg_sim_workflow_anchored.py | 66 +++++++++++++++++---------- 1 file changed, 43 insertions(+), 23 deletions(-) diff --git a/MC/bin/o2dpg_sim_workflow_anchored.py b/MC/bin/o2dpg_sim_workflow_anchored.py index 2feba79ce..1876e4877 100755 --- a/MC/bin/o2dpg_sim_workflow_anchored.py +++ b/MC/bin/o2dpg_sim_workflow_anchored.py @@ -259,33 +259,56 @@ def determine_timestamp(sor, eor, splitinfo, cycle, ntf, HBF_per_timeframe = 256 Determines the timestamp and production offset variable based on the global properties of the production (MC split, etc) and the properties of the run. ntf is the number of timeframes per MC job + + Args: + sor: int + start-of-run in milliseconds since epoch + eor: int + end-of-run in milliseconds since epoch + splitinfo: tuple (int, int) + splitinfo[0]: split ID of this job + splitinfo[1]: total number of jobs to split into + cycle: int + cycle of this productions. Typically a run is not entirely filled by and anchored simulation + but only a proportion of events is simulated. + With increasing number of cycles, the data run is covered more and more. + ntf: int + number of timeframes + HBF_per_timeframe: int + number of orbits per timeframe + Returns: + int: timestamp in milliseconds + int: production offset aka "which timeslot in this production to simulate" """ totaljobs = splitinfo[1] thisjobID = splitinfo[0] - print (f"Start-of-run : {sor}") - print (f"End-of-run : {eor}") - time_length_inmus = 1000*(eor - sor) # time length in micro seconds - timestamp_delta = time_length_inmus / totaljobs + # length of this run in micro seconds, since we use the orbit duration in micro seconds + time_length_inmus = 1000 * (eor - sor) + # figure out how many timeframes fit into this run range + # take the number of orbits per timeframe and multiply by orbit duration to calculate how many timeframes fit into this run ntimeframes = time_length_inmus / (HBF_per_timeframe * LHCOrbitMUS) - norbits = time_length_inmus / LHCOrbitMUS + # also calculate how many orbits fit into the run range print (f"This run has space for {ntimeframes} timeframes") - print (f"This run has {norbits} orbits") - # ntimeframes is the total number of timeframes possible - # if we have totaljobs number of jobs - maxtimeframesperjob = ntimeframes // totaljobs - orbitsperjob = norbits // totaljobs + # figure out how many timeframes can maximally be covered by one job + maxtimeframesperjob = ntimeframes / totaljobs print (f"Each job can do {maxtimeframesperjob} maximally at a prod split of {totaljobs}") - print (f"With each job doing {ntf} timeframes, this corresponds to a filling rate of ", ntf/maxtimeframesperjob) + print (f"With each job doing {ntf} timeframes, this corresponds to a filling rate of {ntf / maxtimeframesperjob}") # filling rate should be smaller than 100% assert(ntf <= maxtimeframesperjob) - maxcycles = maxtimeframesperjob 
// ntf - print (f"We can do this amount of cycle iterations to achieve 100%: ", maxcycles) + # each cycle populates more and more run range. The maximum number of cycles to populate the run fully is: + maxcycles = maxtimeframesperjob / ntf + print (f"We can do this amount of cycle iterations to achieve 100%: {maxcycles}") + # overall, we have maxcycles * totaljobs slots to fill the run range with ntf timeframes per slot + # figure out in which slot to simulate production_offset = int(thisjobID * maxcycles) + cycle + # add the time difference of this slot to start-of-run to get the final timestamp timestamp_of_production = sor + production_offset * ntf * HBF_per_timeframe * LHCOrbitMUS / 1000 + # this is a closure test. If we had prefect floating point precision everywhere, it wouldn't fail. + # But since we don't have that and there are some int casts as well, better check again. assert (timestamp_of_production >= sor) assert (timestamp_of_production <= eor) return int(timestamp_of_production), production_offset @@ -323,16 +346,17 @@ def main(): exit (1) first_orbit = ctp_scalers.getOrbitLimit().first + # SOR and EOR values in milliseconds sor = ctp_scalers.getTimeLimit().first eor = ctp_scalers.getTimeLimit().second if args.use_rct_info: first_orbit = GLOparams["FirstOrbit"] + # SOR and EOR values in milliseconds sor = GLOparams["SOR"] eor = GLOparams["EOR"] - # determine timestamp, and production offset for the final - # MC job to run + # determine timestamp, and production offset for the final MC job to run timestamp, prod_offset = determine_timestamp(sor, eor, [args.split_id - 1, args.prod_split], args.cycle, args.tf, GLOparams["OrbitsPerTF"]) # this is anchored to @@ -341,13 +365,8 @@ def main(): print ("Determined timestamp to be : ", timestamp) print ("Determined offset to be : ", prod_offset) - currentorbit = first_orbit + prod_offset * GLOparams["OrbitsPerTF"] # orbit number at production start - currenttime = sor + prod_offset * GLOparams["OrbitsPerTF"] * LHCOrbitMUS // 1000 # timestamp in milliseconds - - print ("Production put at time : " + str(currenttime)) - # retrieve the GRPHCIF object - grplhcif = retrieve_GRPLHCIF(ccdbreader, int(currenttime)) + grplhcif = retrieve_GRPLHCIF(ccdbreader, int(timestamp)) eCM = grplhcif.getSqrtS() A1 = grplhcif.getAtomicNumberB1() A2 = grplhcif.getAtomicNumberB2() @@ -385,8 +404,9 @@ def main(): effTrigger = 28.0 # this is ZDC else: effTrigger = 0.759 - - rate = retrieve_MinBias_CTPScaler_Rate(ctp_scalers, currenttime/1000., effTrigger, grplhcif.getBunchFilling().getNBunches(), ColSystem) + + # time needs to be converted to seconds ==> timestamp / 1000 + rate = retrieve_MinBias_CTPScaler_Rate(ctp_scalers, timestamp/1000., effTrigger, grplhcif.getBunchFilling().getNBunches(), ColSystem) if rate != None: # if the rate calculation was successful we will use it, otherwise we fall back to some rate given as part From ead8c0ba8d2161a0c50d1829e020133b699d5a53 Mon Sep 17 00:00:00 2001 From: shahoian Date: Sun, 4 Feb 2024 03:16:03 +0100 Subject: [PATCH 016/101] Adjust TPC scaling options to O2 PR12653 Since the CTP lumi is requested by default even if scaling with IDCs is used, the parameters to configure TPC corrections TPC_CORR_SCALING are modidied as: ALIEN_JDL_MEANIRFORTPC (by default: empty) >0 : use at face value to set TPCCorrMap.lumiMean, overriding mean lumi of the map <0 : disable ALL corrections ALIEN_JDL_INSTIRFORTPC (by default: CTP) >0 : use at face value to set TPCCorrMap.lumiInst which will override ONLY CTP lumi value. 
(which does not affect the lumi used for scaling with IDCs) CTP: use Lumi from the CTP Data (as above: not necessarilly for corrections scaling) CTPCCDB: extract CTP lumi from CCDB CTP scalers and override CTP Lumi TPCCorrMap.lumiInst (same comment as above) New variable defining what is used for corrections scaling was added ALIEN_JDL_TPCSCALINGSOURCE (by default set to CTP) NO_SCALING : no scaling applied ( --lumi-type is set to 0) CTP: inst lumi is used also for scaling ( --lumi-type 1) IDCCCDB: use for scaling the TPC scaler from IDCs. --- .../configurations/asyncReco/setenv_extra.sh | 127 +++++++++++------- 1 file changed, 75 insertions(+), 52 deletions(-) diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index 4c95be58a..adbdbaaec 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -296,87 +296,110 @@ elif [[ $ALIGNLEVEL == 1 ]]; then CUT_MATCH_CHI2=160 export ITSTPCMATCH="tpcitsMatch.safeMarginTimeCorrErr=2.;tpcitsMatch.cutMatchingChi2=$CUT_MATCH_CHI2;;tpcitsMatch.crudeAbsDiffCut[0]=6;tpcitsMatch.crudeAbsDiffCut[1]=6;tpcitsMatch.crudeAbsDiffCut[2]=0.3;tpcitsMatch.crudeAbsDiffCut[3]=0.3;tpcitsMatch.crudeAbsDiffCut[4]=5;tpcitsMatch.crudeNSigma2Cut[0]=100;tpcitsMatch.crudeNSigma2Cut[1]=100;tpcitsMatch.crudeNSigma2Cut[2]=100;tpcitsMatch.crudeNSigma2Cut[3]=100;tpcitsMatch.crudeNSigma2Cut[4]=100;" - # enabling TPC calibration scaling + #-------------------------------------- TPC corrections ----------------------------------------------- + # we need to provide to TPC + # 1) interaction rate info (lumi) used for scaling or errors and possible of the corrections : INST_IR_FOR_TPC + # 2) what to use for corrections scaling (lumi or IDC scalers or no scaling at all) : TPC_SCALING_SOURCE # the default is to use CTP, unless specified differently in the JDL... 
INST_IR_FOR_TPC=${ALIEN_JDL_INSTIRFORTPC-CTP} - #...but for 2022 data, where we will rely on different settings depending on the period; note that if ALIEN_JDL_INSTIRFORTPC is set, it has precedence + TPC_SCALING_SOURCE=${ALIEN_JDL_TPCSCALINGSOURCE-CTP} + # MEAN_IR_FOR_TPC allows (1) to alter the map mean IR if >0 or (2) disable all corrections if <0 + MEAN_IR_FOR_TPC=${ALIEN_JDL_MEANIRFORTPC-} + if [[ $ALIEN_JDL_LPMANCHORYEAR == "2022" ]]; then - INST_IR_FOR_TPC=${ALIEN_JDL_INSTIRFORTPC-CTPCCDB} + INST_IR_FOR_TPC=${ALIEN_JDL_INSTIRFORTPC-CTPCCDB} # by default override inst.IR by the mean IR from CCDB and use it for scaling fi if [[ $PERIOD == "LHC22s" ]]; then - INST_IR_FOR_TPC=${ALIEN_JDL_INSTIRFORTPC-0} # in this way, only TPC/Calib/CorrectionMaps is applied, and we know that for 22s it is the same as TPC/Calib/CorrectionMapsRef; note that if ALIEN_JDL_INSTIRFORTPC is set, it has precedence + TPC_SCALING_SOURCE=${ALIEN_JDL_TPCSCALINGSOURCE-NO_SCALING} # in this way, only TPC/Calib/CorrectionMaps is applied, and we know that for 22s it is the same as TPC/Calib/CorrectionMapsRef; elif [[ $PERIOD == @(LHC22c|LHC22d|LHC22e|JUN|LHC22f) ]]; then INST_IR_FOR_TPC=${ALIEN_JDL_INSTIRFORTPC-1} # scaling with very small value for low IR fi - # in MC, we set it to a negative value to disable completely the corrections (not yet operational though, please check O2); + # in MC, we disable completely the corrections # note that if ALIEN_JDL_INSTIRFORTPC is set, it has precedence if [[ $ALIEN_JDL_LPMPRODUCTIONTYPE == "MC" ]] && [[ $O2DPG_ENABLE_TPC_DISTORTIONS != "ON" ]]; then - INST_IR_FOR_TPC=${ALIEN_JDL_INSTIRFORTPC--1} + MEAN_IR_FOR_TPC=${ALIEN_JDL_MEANIRFORTPC--1} fi - # now we set the options + DISABLE_CORRECTIONS= [[ -n "$ALIEN_JDL_MSHAPE_CORRECTION" && $ALIEN_JDL_MSHAPE_CORRECTION == "0" ]] && ENABLE_MSHAPE=0 || ENABLE_MSHAPE=1 - if [[ $INST_IR_FOR_TPC -gt 0 ]]; then # externally imposed IR for scaling - echo "Applying externally provided IR for scaling, $INST_IR_FOR_TPC Hz" - export TPC_CORR_SCALING+=";TPCCorrMap.lumiInst=$INST_IR_FOR_TPC" - elif [[ $INST_IR_FOR_TPC == 0 ]]; then # when zero, only the TPC/Calib/CorrectionMaps is applied - echo "Passed valued for scaling is zero, only TPC/Calib/CorrectionMaps will be applied" - export TPC_CORR_SCALING+=";TPCCorrMap.lumiInst=$INST_IR_FOR_TPC" - elif [[ $INST_IR_FOR_TPC -lt 0 ]]; then # do not apply any correction - echo "Passed valued for scaling is smaller than zero, no scaling will be applied" - echo "NOTA BENE: In the future, this value will signal to not apply any correction at all, which is not operational yet (but please check, as it depends on O2)" - ENABLE_MSHAPE=0 + if [[ -n $MEAN_IR_FOR_TPC ]] ; then # firs check if corrections were not disabled via MEAN_IR_FOR_TPC + if [[ $MEAN_IR_FOR_TPC -gt 0 ]] ; then # positive value overrides map mean lumi + echo "Applying externally provided map mean IR for scaling, $MEAN_IR_FOR_TPC Hz" + export TPC_CORR_SCALING+=";TPCCorrMap.lumiMean=$MEAN_IR_FOR_TPC;" # take mean lumy at face value + elif [[ $MEAN_IR_FOR_TPC -lt 0 ]] ; then # negative mean lumi disables all corrections + echo "Negative MEAN_IR_FOR_TPC -> all TPC corrections will be ignored" + export TPC_CORR_SCALING+=" --lumi-type 0 " + export TPC_CORR_SCALING+=";TPCCorrMap.lumiMean=$MEAN_IR_FOR_TPC;" + ENABLE_MSHAPE=0 + DISABLE_CORRECTIONS=1 + else + echo "Did not recognize MEAN_IR_FOR_TPC = $MEAN_IR_FOR_TPC" + return 1 + fi + fi # MEAN_IR_FOR_TPC overridden + + # set IR for TPC, even if it is not used for corrections scaling + if [[ 
$INST_IR_FOR_TPC -gt 0 ]]; then # externally imposed CTP IR + echo "Applying externally provided istantaneous IR $INST_IR_FOR_TPC Hz" export TPC_CORR_SCALING+=";TPCCorrMap.lumiInst=$INST_IR_FOR_TPC" - elif [[ $INST_IR_FOR_TPC == "CTPCCDB" ]]; then # using what we have in the CCDB CTP counters, extracted at the beginning of the script - echo "Using CTP CCDB which gave the mean IR of the run at the beginning of the script ($RUN_IR Hz)" - export TPC_CORR_SCALING+=";TPCCorrMap.lumiInst=$RUN_IR" elif [[ $INST_IR_FOR_TPC == "CTP" ]]; then if ! has_detector CTP ; then - echo "TPC correction with CTP Lumi is requested but CTP is not in the WORKFLOW_DETECTORS=$WORKFLOW_DETECTORS" + echo "CTP Lumi is for TPC corrections but CTP is not in the WORKFLOW_DETECTORS=$WORKFLOW_DETECTORS" return 1 fi echo "Using CTP inst lumi stored in data" - export TPC_CORR_SCALING+=" --lumi-type 1 " - elif [[ $INST_IR_FOR_TPC == "IDCCCDB" ]]; then - echo "TPC correction with IDC from CCDB will be used" - export TPC_CORR_SCALING+=" --lumi-type 2 " - if [[ $ALIEN_JDL_USEDERIVATIVESFORSCALING == "1" ]]; then - export TPC_CORR_SCALING+=" --corrmap-lumi-mode 1 " - fi - else - echo "Unknown setting for INST_IR_FOR_TPC = $INST_IR_FOR_TPC (with ALIEN_JDL_INST_IR_FOR_TPC = $ALIEN_JDL_INST_IR_FOR_TPC)" + elif [[ $INST_IR_FOR_TPC == "CTPCCDB" ]]; then # using what we have in the CCDB CTP counters, extracted at the beginning of the script + echo "Using CTP CCDB which gave the mean IR of the run at the beginning of the script ($RUN_IR Hz)" + export TPC_CORR_SCALING+=";TPCCorrMap.lumiInst=$RUN_IR" + else echo "Unknown setting for INST_IR_FOR_TPC = $INST_IR_FOR_TPC (with ALIEN_JDL_INST_IR_FOR_TPC = $ALIEN_JDL_INST_IR_FOR_TPC)" return 1 fi - if [[ $ENABLE_MSHAPE == "1" ]]; then - export TPC_CORR_SCALING+=" --enable-M-shape-correction " + # now set the source of the corrections + if [[ $DISABLE_CORRECTIONS != 1 ]] ; then + if [[ $TPC_SCALING_SOURCE == "NO_SCALING" ]]; then + echo "NO SCALING is requested: only TPC/Calib/CorrectionMapsV2... will be applied" + export TPC_CORR_SCALING+=" --lumi-type 0 " + elif [[ $TPC_SCALING_SOURCE == "CTP" ]]; then + echo "CTP Lumi from data will be used for TPC scaling" + export TPC_CORR_SCALING+=" --lumi-type 1 " + elif [[ $TPC_SCALING_SOURCE == "IDCCCDB" ]]; then + echo "TPC correction with IDC from CCDB will be used" + export TPC_CORR_SCALING+=" --lumi-type 2 " + if [[ $ALIEN_JDL_USEDERIVATIVESFORSCALING == "1" ]]; then + export TPC_CORR_SCALING+=" --corrmap-lumi-mode 1 " + fi + else + echo "Unknown setting for TPC_SCALING_SOURCE = $TPC_SCALING_SOURCE (with ALIEN_JDL_TPCSCALINGSOURCE = $ALIEN_JDL_TPCSCALINGSOURCE)" + fi + fi + + if ! 
has_detector CTP ; then + echo "CTP is not in the list of detectors, disabling CTP Lumi input request" + export TPC_CORR_SCALING+=" --disable-ctp-lumi-request " fi - if [[ -n $ALIEN_JDL_MEANIRFORTPC && $ALIEN_JDL_MEANIRFORTPC > 0 ]]; then # externally imposed TPC map mean IR for scaling - export TPC_CORR_SCALING+=";TPCCorrMap.lumiMean=$ALIEN_JDL_MEANIRFORTPC" + if [[ $ENABLE_MSHAPE == "1" ]]; then + export TPC_CORR_SCALING+=" --enable-M-shape-correction " fi - - if [[ $ALIEN_JDL_LPMANCHORYEAR == "2023" ]] && [[ $BEAMTYPE == "PbPb" ]] && ([[ -z $INST_IR_FOR_TPC ]] || [[ $INST_IR_FOR_TPC == "CTP" ]]); then - echo "We are in PbPb 2023, the default - for now - is to use CTP in the data" - unset TPC_CORR_SCALING - export TPC_CORR_SCALING=";TPCCorrMap.lumiInstFactor=2.414;TPCCorrMap.lumiMean=0 --lumi-type 1 " - if [[ $SCALE_WITH_ZDC == 0 ]]; then - # scaling with FT0 - if [[ $SCALE_WITH_FT0 == 1 ]]; then - export TPC_CORR_SCALING=" --ctp-lumi-source 1 --lumi-type 1 TPCCorrMap.lumiInstFactor=135.;TPCCorrMap.lumiMean=0" - else - echo "Neither ZDC nor FT0 are in the run, and this is from 2023 PbPb: we cannot scale TPC ditortion corrections, aborting..." - return 1 - fi - fi - if [[ $ENABLE_MSHAPE == "1" ]]; then - export TPC_CORR_SCALING+=" --enable-M-shape-correction " + + if [[ $ALIEN_JDL_LPMANCHORYEAR == "2023" ]] && [[ $BEAMTYPE == "PbPb" ]] ; then + if [[ $SCALE_WITH_ZDC == 1 ]]; then + echo "For 2023 PbPb ZDC inst. lumi applying factor 2.414" + export TPC_CORR_SCALING+=";TPCCorrMap.lumiInstFactor=2.414;" + elif [[ $SCALE_WITH_FT0 == 1 ]]; then + echo "For 2023 PbPb FT0 inst. lumi applying factor 135." + export TPC_CORR_SCALING+="TPCCorrMap.lumiInstFactor=135.;" + else + echo "Neither ZDC nor FT0 are in the run, and this is from 2023 PbPb: we cannot scale TPC ditortion corrections, aborting..." 
+ return 1 fi fi echo "Final setting for TPC scaling is:" echo $TPC_CORR_SCALING + #-------------------------------------- TPC corrections (end)-------------------------------------------- if [[ $PERIOD != @(LHC22c|LHC22d|LHC22e|JUN|LHC22f) ]] ; then echo "Setting TPCCLUSTERTIMESHIFT to 0" @@ -385,8 +408,8 @@ elif [[ $ALIGNLEVEL == 1 ]]; then echo "We are in period $PERIOD, we need to keep the correction for the TPC cluster time, since no new vdrift was extracted" fi - TRACKTUNETPCINNER="trackTuneParams.tpcCovInnerType=1;trackTuneParams.tpcCovInner[0]=0.01;trackTuneParams.tpcCovInner[1]=1.;trackTuneParams.tpcCovInner[2]=4e-7;trackTuneParams.tpcCovInner[3]=4.e-5;trackTuneParams.tpcCovInner[4]=6.8e-6;" - TRACKTUNETPCOUTER="trackTuneParams.tpcCovOuterType=1;trackTuneParams.tpcCovOuter[0]=0.01;trackTuneParams.tpcCovOuter[1]=1.;trackTuneParams.tpcCovOuter[2]=4e-7;trackTuneParams.tpcCovOuter[3]=4.e-5;trackTuneParams.tpcCovOuter[4]=6.8e-6;" + TRACKTUNETPCINNER="trackTuneParams.tpcCovInnerType=1;trackTuneParams.tpcCovInner[0]=0.1;trackTuneParams.tpcCovInner[1]=1.;trackTuneParams.tpcCovInner[2]=6.3e-4;trackTuneParams.tpcCovInner[3]=6.3e-3;trackTuneParams.tpcCovInner[4]=2.6e-3;" + TRACKTUNETPCOUTER="trackTuneParams.tpcCovOuterType=1;trackTuneParams.tpcCovOuter[0]=0.1;trackTuneParams.tpcCovOuter[1]=1.;trackTuneParams.tpcCovOuter[2]=6.3e-4;trackTuneParams.tpcCovOuter[3]=6.3e-3;trackTuneParams.tpcCovOuter[4]=2.6e-3;" fi From cd07a5c91c559d20481093d7769ec82478a639c3 Mon Sep 17 00:00:00 2001 From: swenzel Date: Fri, 2 Feb 2024 13:08:02 +0100 Subject: [PATCH 017/101] prototypic addition of TPC time series O2-4612 https://its.cern.ch/jira/browse/O2-4612 --- MC/bin/o2dpg_sim_workflow.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index 70dcafb71..5f116bab5 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -1421,9 +1421,23 @@ def addQCPerTF(taskName, needs, readerCommand, configFilePath, objectsFile=''): # Enable CTP readout replay for triggered detectors (EMCAL, HMPID, PHOS/CPV, TRD) # Needed untill triggers are supported in CTP simulation AODtask['cmd'] += ' --ctpreadout-create 1' - workflow['stages'].append(AODtask) + # TPC - time-series objects + # initial implementation taken from comments in https://its.cern.ch/jira/browse/O2-4612 + # TODO: this needs to be made configurable (as a function of which detectors are actually present) + tpctsneeds = [ TPCRECOtask['name'], + ITSTPCMATCHtask['name'], + TOFTPCMATCHERtask['name'], + PVFINDERtask['name'] + ] + TPCTStask = createTask(name='tpctimeseries_'+str(tf), needs=tpctsneeds, tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='2000', cpu='1') + TPCTStask['cmd'] = 'o2-global-track-cluster-reader --disable-mc --cluster-types "TOF" --track-types "ITS,TPC,ITS-TPC,ITS-TPC-TOF,ITS-TPC-TRD-TOF"' + TPCTStask['cmd'] += ' --primary-vertices ' + TPCTStask['cmd'] += ' | o2-tpc-time-series-workflow --enable-unbinned-root-output --sample-unbinned-tsallis --sampling-factor 0.1 ' + TPCTStask['cmd'] += putConfigValuesNew() + ' ' + getDPL_global_options(bigshm=True) + workflow['stages'].append(TPCTStask) + # AOD merging / combination step (as individual stages) --> for the moment deactivated in favor or more stable global merging """ aodmergerneeds = [ AODtask['name'] ] From e568993c65a8c71ba99afd8df0711835a7a73251 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Fri, 2 Feb 2024 13:15:59 +0100 Subject: [PATCH 018/101] Bug fix for sampling --- 
DATA/production/configurations/asyncReco/setenv_extra.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index adbdbaaec..bc0797b71 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -613,7 +613,7 @@ if [[ $ALIEN_JDL_EXTRACTTIMESERIES == 1 ]]; then if [[ ! -z "$ALIEN_JDL_SAMPLINGFACTORTIMESERIES" ]]; then # this takes priority export SAMPLINGFACTORTIMESERIES=${ALIEN_JDL_SAMPLINGFACTORTIMESERIES} fi - export ARGS_EXTRA_PROCESS_o2_tpc_time_series_workflow="$ARGS_EXTRA_PROCESS_o2_tpc_time_series_workflow --sampling-factor ${ALIEN_JDL_SAMPLINGFACTORTIMESERIES}" + export ARGS_EXTRA_PROCESS_o2_tpc_time_series_workflow="$ARGS_EXTRA_PROCESS_o2_tpc_time_series_workflow --sampling-factor ${SAMPLINGFACTORTIMESERIES}" fi fi From 232fdbde236da69d9f2240bbc6b1324dc2a99761 Mon Sep 17 00:00:00 2001 From: ValerieRamillien <95756671+ValerieRamillien@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:04:11 +0100 Subject: [PATCH 019/101] Update mid.json proposed configuration changes --- DATA/production/qc-async/mid.json | 27 ++++++++------------------- 1 file changed, 8 insertions(+), 19 deletions(-) diff --git a/DATA/production/qc-async/mid.json b/DATA/production/qc-async/mid.json index 9c6d6997e..7502d9f30 100644 --- a/DATA/production/qc-async/mid.json +++ b/DATA/production/qc-async/mid.json @@ -3,23 +3,18 @@ "config": { "database": { "implementation": "CCDB", - "host": "ali-qcdb.cern.ch:8083", - "username": "not_applicable", - "password": "not_applicable", - "name": "not_applicable" + "host": "ccdb-test.cern.ch:8080" }, "Activity": { - "number": "42", - "type": "2" }, "monitoring": { - "url": "influxdb-unix:///tmp/telegraf.sock" + "url": "infologger:///debug?qc" }, "consul": { - "url": "null:8500" + "url": "" }, "conditionDB": { - "url": "null:8083" + "url": "ccdb-test.cern.ch:8080" } }, "tasks": { @@ -30,11 +25,9 @@ "moduleName": "QcMID", "detectorName": "MID", "cycleDurationSeconds": "60", - "maxNumberCycles": "-1", "dataSource": { "type": "direct", - "query": "digits:MID/DATA;digits_rof:MID/DATAROF", - "query_comment": "100% sampling" + "query": "digits:MID/DATA;digits_rof:MID/DATAROF" } }, "MIDClusters": { @@ -44,11 +37,9 @@ "moduleName": "QcMID", "detectorName": "MID", "cycleDurationSeconds": "60", - "maxNumberCycles": "-1", "dataSource": { "type": "direct", - "query": "clusters:MID/TRACKCLUSTERS;clusterrofs:MID/TRCLUSROFS", - "query_comment": "100% sampling" + "query": "clusters:MID/TRACKCLUSTERS;clusterrofs:MID/TRCLUSROFS" } }, "MIDTracks": { @@ -58,11 +49,9 @@ "moduleName": "QcMID", "detectorName": "MID", "cycleDurationSeconds": "60", - "maxNumberCycles": "-1", "dataSource": { "type": "direct", - "query": "tracks:MID/TRACKS;trackrofs:MID/TRACKROFS", - "query_comment": "100% sampling" + "query": "tracks:MID/TRACKS;trackrofs:MID/TRACKROFS" } } }, @@ -124,4 +113,4 @@ } }, "dataSamplingPolicies": [] -} \ No newline at end of file +} From 714fa53d4ea72ac1a33737adad40bf3ef39bda5e Mon Sep 17 00:00:00 2001 From: ValerieRamillien <95756671+ValerieRamillien@users.noreply.github.com> Date: Mon, 29 Jan 2024 14:35:07 +0100 Subject: [PATCH 020/101] Update mid.json correction --- DATA/production/qc-async/mid.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/DATA/production/qc-async/mid.json b/DATA/production/qc-async/mid.json index 7502d9f30..4f49f2ce9 100644 --- 
a/DATA/production/qc-async/mid.json +++ b/DATA/production/qc-async/mid.json @@ -3,7 +3,7 @@ "config": { "database": { "implementation": "CCDB", - "host": "ccdb-test.cern.ch:8080" + "host": "ccdb-test.cern.ch:8080" }, "Activity": { }, @@ -27,7 +27,7 @@ "cycleDurationSeconds": "60", "dataSource": { "type": "direct", - "query": "digits:MID/DATA;digits_rof:MID/DATAROF" + "query": "digits:MID/DATA;digits_rof:MID/DATAROF" } }, "MIDClusters": { @@ -39,7 +39,7 @@ "cycleDurationSeconds": "60", "dataSource": { "type": "direct", - "query": "clusters:MID/TRACKCLUSTERS;clusterrofs:MID/TRCLUSROFS" + "query": "clusters:MID/TRACKCLUSTERS;clusterrofs:MID/TRCLUSROFS" } }, "MIDTracks": { @@ -51,7 +51,7 @@ "cycleDurationSeconds": "60", "dataSource": { "type": "direct", - "query": "tracks:MID/TRACKS;trackrofs:MID/TRACKROFS" + "query": "tracks:MID/TRACKS;trackrofs:MID/TRACKROFS" } } }, From af9d2a8c221a004c51b4222e5a0e51bf8dd3215e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=B2=20Jacazio?= Date: Tue, 6 Feb 2024 09:54:04 +0100 Subject: [PATCH 021/101] TOF: add good match in anQC (#1425) * TOF: add good match in anQC * Update analysis-testing-data.json --- .../default/pbpb/analysis-testing-data.json | 235 +++++++++++++++--- .../default/pp/analysis-testing-data.json | 9 +- .../json/default/pp/analysis-testing-mc.json | 6 +- 3 files changed, 217 insertions(+), 33 deletions(-) diff --git a/MC/config/analysis_testing/json/default/pbpb/analysis-testing-data.json b/MC/config/analysis_testing/json/default/pbpb/analysis-testing-data.json index 3b8c49112..648b471f3 100644 --- a/MC/config/analysis_testing/json/default/pbpb/analysis-testing-data.json +++ b/MC/config/analysis_testing/json/default/pbpb/analysis-testing-data.json @@ -54,8 +54,10 @@ "processMFT": "true" }, "bc-selection-task": { + "ITSROFrameBorderMargin": "30", "processRun2": "false", - "processRun3": "true" + "processRun3": "true", + "triggerBcShift": "999" }, "cell-monitor": { "maxCellTimeMain": "100", @@ -128,12 +130,14 @@ "processRun3": "true" }, "event-selection-task": { + "customDeltaBC": "-1", "isMC": "false", "muonSelection": "0", "processRun2": "false", "processRun3": "true", - "syst": "pp" + "syst": "PbPb" }, + "ft0-corrected-table": "", "lambdakzero-builder": { "ccdb-url": "http://alice-ccdb.cern.ch", "createV0CovMats": "-1", @@ -165,6 +169,59 @@ "v0radius": "1" }, "multiplicity-table": { + "ccdbpath": "Centrality/Calibration", + "ccdburl": "http://alice-ccdb.cern.ch", + "doVertexZeq": "1", + "enabledTables": { + "labels_cols": [ + "Enable" + ], + "labels_rows": [ + "FV0Mults", + "FT0Mults", + "FDDMults", + "ZDCMults", + "TrackletMults", + "TPCMults", + "PVMults", + "MultsExtra", + "MultZeqs", + "MultsExtraMC" + ], + "values": [ + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ] + ] + }, + "fractionOfEvents": "2", "processRun2": "false", "processRun3": "true" }, @@ -293,10 +350,125 @@ "rapidity": "0.5", "v0cospa": "0.995" }, - "tof-pid-qa": { + "timestamp-task": {}, + "tof-event-time": { + "ccdb-timestamp": "-1", + "ccdb-url": "", + "enableTimeDependentResponse": "false", + "fatalOnPassNotAvailable": "true", + "inheritFromBaseTask": "true", + "loadResponseFromCCDB": "false", + "maxEvTimeTOF": "100000", + "maxMomentum": "2", + "maxNtracksInSet": "10", + "minMomentum": "0.5", + "paramFileName": "", + "parametrizationPath": "TOF/Calib/Params", + "passName": "", + "processFT0": "true", + "processNoFT0": "false", + "processOnlyFT0": "false", + 
"processRun2": "false", + "sel8TOFEvTime": "false" + }, + "tof-pid": { + "ccdb-timestamp": "-1", + "ccdb-url": "", + "enableParticle": { + "labels_cols": [ + "Enable" + ], + "labels_rows": [ + "El", + "Mu", + "Pi", + "Ka", + "Pr", + "De", + "Tr", + "He", + "Al" + ], + "values": [ + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ], + [ + "-1" + ] + ] + }, + "enableTimeDependentResponse": "false", + "fatalOnPassNotAvailable": "true", + "inheritFromBaseTask": "true", + "loadResponseFromCCDB": "false", + "paramFileName": "", + "parametrizationPath": "", + "passName": "", + "processWSlice": "true", + "processWoSlice": "false", + "timeShiftCCDBPath": "" + }, + "tof-pid-beta": {}, + "tof-pid-beta-qa": { "applyEvSel": "2", - "applyRapidityCut": "1", + "lastTrdLayerForTrdMatch": "5", + "logAxis": "0", + "maxP": "5", + "minP": "0.100000001", + "nBinsP": "400", "requireGoodMatchTracks": "true", + "splitSignalPerCharge": "true", + "splitSignalPerEvTime": "true", + "splitTrdTracks": "false", + "tofBetaBins": { + "values": [ + "4000", + "0", + "2" + ] + }, + "tofMassBins": { + "values": [ + "1000", + "0", + "3" + ] + }, + "trackLengthBins": { + "values": [ + "100", + "0", + "1000" + ] + }, + "trackSelection": "1" + }, + "tof-pid-full": {}, + "tof-pid-qa": { + "applyEvSel": "2", + "applyRapidityCut": "true", "deltaBins": { "values": [ 100, @@ -304,8 +476,8 @@ 1000 ] }, - "enableEvTimeSplitting": "0", - "enableVsMomentumHistograms": "0", + "enableEvTimeSplitting": "false", + "enableVsMomentumHistograms": "false", "etaBins": { "values": [ 50, @@ -338,32 +510,33 @@ 6.283185307179586 ] }, - "processAlpha": "0", - "processDeuteron": "0", - "processElectron": "0", - "processFullAlpha": "0", - "processFullDeuteron": "1", - "processFullElectron": "0", - "processFullHelium3": "1", - "processFullKaon": "1", - "processFullMuon": "0", - "processFullPion": "1", - "processFullProton": "1", - "processFullTriton": "1", - "processHelium3": "0", - "processKaon": "0", - "processMuon": "0", - "processPion": "0", - "processProton": "0", - "processTriton": "0", - "produceDeltaTEtaPhiMap": "0", + "processAlpha": "false", + "processDeuteron": "false", + "processElectron": "false", + "processFullAlpha": "false", + "processFullDeuteron": "true", + "processFullElectron": "false", + "processFullHelium3": "true", + "processFullKaon": "true", + "processFullMuon": "false", + "processFullPion": "true", + "processFullProton": "true", + "processFullTriton": "false", + "processHelium3": "false", + "processKaon": "false", + "processMuon": "false", + "processPion": "false", + "processProton": "false", + "processTriton": "false", + "produceDeltaTEtaPhiMap": "false", "ptDeltaTEtaPhiMap": "3", - "splitSignalPerCharge": "1", + "requireGoodMatchTracks": "true", + "splitSignalPerCharge": "true", "trackLengthBins": { "values": [ - 50, - 0, - 1000 + "50", + "0", + "1000" ] }, "trackSelection": "1" @@ -500,6 +673,8 @@ "lutPath": "GLO/Param/MatLUT", "mVtxPath": "GLO/Calib/MeanVertex", "processCovariance": "true", + "processStandardWithPID": "false", + "processCovarianceWithPID": "false", "processStandard": "false" }, "track-selection": { @@ -517,4 +692,4 @@ "ft0-qa": { "isLowFlux": "false" } -} +} \ No newline at end of file diff --git a/MC/config/analysis_testing/json/default/pp/analysis-testing-data.json b/MC/config/analysis_testing/json/default/pp/analysis-testing-data.json index 817d56da9..b41440ae8 100644 --- 
a/MC/config/analysis_testing/json/default/pp/analysis-testing-data.json +++ b/MC/config/analysis_testing/json/default/pp/analysis-testing-data.json @@ -54,8 +54,10 @@ "processMFT": "true" }, "bc-selection-task": { + "ITSROFrameBorderMargin": "30", "processRun2": "false", - "processRun3": "true" + "processRun3": "true", + "triggerBcShift": "999" }, "cell-monitor": { "maxCellTimeMain": "100", @@ -128,6 +130,7 @@ "processRun3": "true" }, "event-selection-task": { + "customDeltaBC": "-1", "isMC": "false", "muonSelection": "0", "processRun2": "false", @@ -489,6 +492,8 @@ "lutPath": "GLO/Param/MatLUT", "mVtxPath": "GLO/Calib/MeanVertex", "processCovariance": "true", + "processStandardWithPID": "false", + "processCovarianceWithPID": "false", "processStandard": "false" }, "track-selection": { @@ -506,4 +511,4 @@ "ft0-qa": { "isLowFlux": "true" } -} +} \ No newline at end of file diff --git a/MC/config/analysis_testing/json/default/pp/analysis-testing-mc.json b/MC/config/analysis_testing/json/default/pp/analysis-testing-mc.json index 4ab7676e0..8c9bf7005 100644 --- a/MC/config/analysis_testing/json/default/pp/analysis-testing-mc.json +++ b/MC/config/analysis_testing/json/default/pp/analysis-testing-mc.json @@ -1366,15 +1366,19 @@ "lutPath": "GLO/Param/MatLUT", "mVtxPath": "GLO/Calib/MeanVertex", "processCovariance": "true", - "processStandard": "false" + "processCovarianceWithPID": "false", + "processStandard": "false", + "processStandardWithPID": "false" }, "track-selection": { "compatibilityIU": "false", + "dcaSetup": "0", "etaMax": "0.8", "etaMin": "-0.8", "isRun3": "true", "itsMatching": "1", "produceFBextendedTable": "-1", + "produceTable": "-1", "ptMax": "1e+10", "ptMin": "0.1" }, From a64891996a8d33ddfd800eeef758fb0c16545a29 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Tue, 6 Feb 2024 15:47:47 +0100 Subject: [PATCH 022/101] Adjusting the value --- DATA/production/configurations/asyncReco/setenv_extra.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index bc0797b71..6c8397ed3 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -608,7 +608,7 @@ if [[ $ALIEN_JDL_EXTRACTTIMESERIES == 1 ]]; then fi if [[ $ON_SKIMMED_DATA == 1 ]] || [[ ! -z "$ALIEN_JDL_SAMPLINGFACTORTIMESERIES" ]] ; then if [[ $ON_SKIMMED_DATA == 1 ]] ; then - SAMPLINGFACTORTIMESERIES=0.1f + SAMPLINGFACTORTIMESERIES=0.1 fi if [[ ! 
-z "$ALIEN_JDL_SAMPLINGFACTORTIMESERIES" ]]; then # this takes priority export SAMPLINGFACTORTIMESERIES=${ALIEN_JDL_SAMPLINGFACTORTIMESERIES} From 861db706d79da6fc40db3826ff8e3a807be3a42f Mon Sep 17 00:00:00 2001 From: Laura Serksnyte Date: Fri, 26 Jan 2024 16:18:10 +0100 Subject: [PATCH 023/101] Include QC on the cluster information per track --- DATA/production/qc-async/tpc.json | 19 +++++++++++++++++++ MC/config/QC/json/tpc-qc-standard-direct.json | 18 ++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/DATA/production/qc-async/tpc.json b/DATA/production/qc-async/tpc.json index 6afa9aacc..350ed4d49 100644 --- a/DATA/production/qc-async/tpc.json +++ b/DATA/production/qc-async/tpc.json @@ -68,6 +68,25 @@ "cutMaxpTPCMIPs": "0.55" } }, + "TPCTrackClusters": { + "active": "true", + "taskName": "TrackClusters", + "className": "o2::quality_control_modules::tpc::TrackClusters", + "moduleName": "QcTPC", + "detectorName": "TPC", + "cycleDurationSeconds": "300", + "dataSource": { + "type": "direct", + "query": "inputTracks:TPC/TRACKS/0;inputClusters:TPC/CLUSTERNATIVE;inputClusRefs:TPC/CLUSREFS/0" + }, + "taskParameters": { + "cutAbsEta": "1.", + "cutMinNCluster": "60", + "cutMindEdxTot": "20.", + "seed": "0", + "samplingFraction": "0.1" + } + }, "TPCTracks": { "active": "true", "taskName": "Tracks", diff --git a/MC/config/QC/json/tpc-qc-standard-direct.json b/MC/config/QC/json/tpc-qc-standard-direct.json index a1ab6692a..533bc4ea5 100644 --- a/MC/config/QC/json/tpc-qc-standard-direct.json +++ b/MC/config/QC/json/tpc-qc-standard-direct.json @@ -58,6 +58,24 @@ "TimeBinXMax": "100000" } }, + "TrackClusters": { + "active": "true", + "className": "o2::quality_control_modules::tpc::TrackClusters", + "moduleName": "QcTPC", + "detectorName": "TPC", + "cycleDurationSeconds": "60", + "dataSource": { + "type": "direct", + "query": "inputTracks:TPC/TRACKS/0;inputClusters:TPC/CLUSTERNATIVE;inputClusRefs:TPC/CLUSREFS/0" + }, + "taskParameters": { + "cutAbsEta": "1.", + "cutMinNCluster": "60", + "cutMindEdxTot": "20.", + "seed": "0", + "samplingFraction": "0.1" + } + }, "PID": { "active": "true", "className": "o2::quality_control_modules::tpc::PID", From 9e700b48c15e2318f0dbd3a5cf0266264c2f1446 Mon Sep 17 00:00:00 2001 From: shahoian Date: Wed, 7 Feb 2024 17:27:09 +0100 Subject: [PATCH 024/101] Adjust syst errors and ITS/TPC matching params, use IDCCCDB as default scaling src. 
1) removed TPC cluster additive errors 2) reduces Z and tgL track syst errors 3) decreased matching chi2 cut and allowed max abs difference between TPC and ITS q/pt 4) set the matching reference X = 60 5) default TPCSCALINGSOURCE changed from CTP to IDCCCDB --- .../configurations/asyncReco/setenv_extra.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index 6c8397ed3..a34d8ffce 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -293,8 +293,8 @@ elif [[ $ALIGNLEVEL == 1 ]]; then ERROB="100e-8" [[ -z $TPCITSTIMEERR ]] && TPCITSTIMEERR="0.2" [[ -z $ITS_CONFIG || "$ITS_CONFIG" != *"--tracking-mode"* ]] && export ITS_CONFIG+=" --tracking-mode async" - CUT_MATCH_CHI2=160 - export ITSTPCMATCH="tpcitsMatch.safeMarginTimeCorrErr=2.;tpcitsMatch.cutMatchingChi2=$CUT_MATCH_CHI2;;tpcitsMatch.crudeAbsDiffCut[0]=6;tpcitsMatch.crudeAbsDiffCut[1]=6;tpcitsMatch.crudeAbsDiffCut[2]=0.3;tpcitsMatch.crudeAbsDiffCut[3]=0.3;tpcitsMatch.crudeAbsDiffCut[4]=5;tpcitsMatch.crudeNSigma2Cut[0]=100;tpcitsMatch.crudeNSigma2Cut[1]=100;tpcitsMatch.crudeNSigma2Cut[2]=100;tpcitsMatch.crudeNSigma2Cut[3]=100;tpcitsMatch.crudeNSigma2Cut[4]=100;" + CUT_MATCH_CHI2=80 + export ITSTPCMATCH="tpcitsMatch.safeMarginTimeCorrErr=2.;tpcitsMatch.XMatchingRef=60.;tpcitsMatch.cutMatchingChi2=$CUT_MATCH_CHI2;;tpcitsMatch.crudeAbsDiffCut[0]=6;tpcitsMatch.crudeAbsDiffCut[1]=6;tpcitsMatch.crudeAbsDiffCut[2]=0.3;tpcitsMatch.crudeAbsDiffCut[3]=0.3;tpcitsMatch.crudeAbsDiffCut[4]=1.5;tpcitsMatch.crudeNSigma2Cut[0]=64;tpcitsMatch.crudeNSigma2Cut[1]=64;tpcitsMatch.crudeNSigma2Cut[2]=64;tpcitsMatch.crudeNSigma2Cut[3]=64;tpcitsMatch.crudeNSigma2Cut[4]=64;" #-------------------------------------- TPC corrections ----------------------------------------------- # we need to provide to TPC @@ -302,7 +302,7 @@ elif [[ $ALIGNLEVEL == 1 ]]; then # 2) what to use for corrections scaling (lumi or IDC scalers or no scaling at all) : TPC_SCALING_SOURCE # the default is to use CTP, unless specified differently in the JDL... 
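  # Reminder on the ${VAR-default} form used below: the default (CTP / IDCCCDB) is taken
  # only when the JDL variable is completely unset; a variable that is set but empty is
  # kept as-is, unlike the ${VAR:-default} form which also overrides empty values.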
INST_IR_FOR_TPC=${ALIEN_JDL_INSTIRFORTPC-CTP} - TPC_SCALING_SOURCE=${ALIEN_JDL_TPCSCALINGSOURCE-CTP} + TPC_SCALING_SOURCE=${ALIEN_JDL_TPCSCALINGSOURCE-IDCCCDB} # MEAN_IR_FOR_TPC allows (1) to alter the map mean IR if >0 or (2) disable all corrections if <0 MEAN_IR_FOR_TPC=${ALIEN_JDL_MEANIRFORTPC-} @@ -408,15 +408,15 @@ elif [[ $ALIGNLEVEL == 1 ]]; then echo "We are in period $PERIOD, we need to keep the correction for the TPC cluster time, since no new vdrift was extracted" fi - TRACKTUNETPCINNER="trackTuneParams.tpcCovInnerType=1;trackTuneParams.tpcCovInner[0]=0.1;trackTuneParams.tpcCovInner[1]=1.;trackTuneParams.tpcCovInner[2]=6.3e-4;trackTuneParams.tpcCovInner[3]=6.3e-3;trackTuneParams.tpcCovInner[4]=2.6e-3;" - TRACKTUNETPCOUTER="trackTuneParams.tpcCovOuterType=1;trackTuneParams.tpcCovOuter[0]=0.1;trackTuneParams.tpcCovOuter[1]=1.;trackTuneParams.tpcCovOuter[2]=6.3e-4;trackTuneParams.tpcCovOuter[3]=6.3e-3;trackTuneParams.tpcCovOuter[4]=2.6e-3;" + TRACKTUNETPCINNER="trackTuneParams.tpcCovInnerType=1;trackTuneParams.tpcCovInner[0]=0.1;trackTuneParams.tpcCovInner[1]=0.2;trackTuneParams.tpcCovInner[2]=6.e-4;trackTuneParams.tpcCovInner[3]=6.e-4;trackTuneParams.tpcCovInner[4]=2.6e-3;" + TRACKTUNETPCOUTER="trackTuneParams.tpcCovOuterType=1;trackTuneParams.tpcCovOuter[0]=0.1;trackTuneParams.tpcCovOuter[1]=0.2;trackTuneParams.tpcCovOuter[2]=6.e-4;trackTuneParams.tpcCovOuter[3]=6.e-4;trackTuneParams.tpcCovOuter[4]=2.6e-3;" fi # adding additional cluster errors # the values below should be squared, but the validation of those values (0.01 and 0.0225) is ongoing -TPCEXTRAERR=";GPU_rec_tpc.clusterError2AdditionalY=0.1;GPU_rec_tpc.clusterError2AdditionalZ=0.15;" -TRACKTUNETPC="$TPCEXTRAERR" +#TPCEXTRAERR=";GPU_rec_tpc.clusterError2AdditionalY=0.1;GPU_rec_tpc.clusterError2AdditionalZ=0.15;" +TRACKTUNETPC=${TPCEXTRAERR-} # combining parameters [[ ! -z ${TRACKTUNETPCINNER:-} || ! -z ${TRACKTUNETPCOUTER:-} ]] && TRACKTUNETPC="$TRACKTUNETPC;trackTuneParams.sourceLevelTPC=true;$TRACKTUNETPCINNER;$TRACKTUNETPCOUTER" From 7e20a7fa5011fd5878b4eea4b88bfa5e0ef51c94 Mon Sep 17 00:00:00 2001 From: swenzel Date: Mon, 5 Feb 2024 16:46:31 +0100 Subject: [PATCH 025/101] Utility to produce and analyse file-task graph relationships A small set of tools allowing to produce a report of which O2DPG MC task reads and produces which files --- UTILS/FileIOGraph/README.md | 38 +++++ UTILS/FileIOGraph/analyse_FileIO.py | 193 +++++++++++++++++++++++ UTILS/FileIOGraph/monitor_fileaccess.cpp | 188 ++++++++++++++++++++++ 3 files changed, 419 insertions(+) create mode 100644 UTILS/FileIOGraph/README.md create mode 100755 UTILS/FileIOGraph/analyse_FileIO.py create mode 100644 UTILS/FileIOGraph/monitor_fileaccess.cpp diff --git a/UTILS/FileIOGraph/README.md b/UTILS/FileIOGraph/README.md new file mode 100644 index 000000000..48443b310 --- /dev/null +++ b/UTILS/FileIOGraph/README.md @@ -0,0 +1,38 @@ +This is a small custom tool to monitor file access +and to produce graphs of file production and file consumption +by O2DPG Monte Carlo tasks. Such information can be useful for + +(a) verification of data paths +(b) early removal of files as soon as they are not needed anymore + + +In more detail, core elements of this directory are + +* monitor_fileaccess: + +A tool, useable by root, providing reports about +read and write events to files and which process is involved. +The tool is based on the efficient fanotify kernel system and reporting +can be restricted to certain shells (by giving a mother PID). 
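The mother PID is simply the PID of the shell whose child processes should be watched; from inside that shell it can be looked up with `echo $$` and passed via `MAXMOTHERPID` as shown below.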
+ +The tool is standalone and can be compiled, if needed, by running + +`g++ monitor_fileaccess.cpp -O2 -o monitor_fileaccess.exe` + +The tool can be run simply by + +``` +sudo MAXMOTHERPID=689584 ./monitor.exe | tee /tmp/fileaccess +``` + +to monitor file events happening by child processes of shell 689584. + + +* analyse_FileIO.py: + + + + + + + diff --git a/UTILS/FileIOGraph/analyse_FileIO.py b/UTILS/FileIOGraph/analyse_FileIO.py new file mode 100755 index 000000000..5ad4523f9 --- /dev/null +++ b/UTILS/FileIOGraph/analyse_FileIO.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python3 + +# This is a python script which analyses +# a report from a "fanotify" file access report +# convoluted with task information from an O2DPG MC workflow. +# The tool produces: +# - a json report +# - optionally a graphviz visualization of file and task dependencies + +import argparse +import re +import json + +try: + from graphviz import Digraph + havegraphviz=True +except ImportError: + havegraphviz=False + +parser = argparse.ArgumentParser(description='Produce O2DPG MC file dependency reports') + +# the run-number of data taking or default if unanchored +parser.add_argument('--actionFile', type=str, help="O2DPG pipeline runner action file") +parser.add_argument('--monitorFile', type=str, help="monitoring file provided by fanotify tool. See O2DPG/UTILS/FileIOGraph.") +parser.add_argument('--basedir', type=str, help="O2DPG workflow dir") +parser.add_argument('--file-filters', nargs='+', default=[r'.*'], help="Filters (regular expressions) to select files (default all = '.*')") +parser.add_argument('--graphviz', type=str, help="Produce a graphviz plot") +parser.add_argument('-o','--output', type=str, help="Output JSON report") + +args = parser.parse_args() + +# what do we need to do +# (a) - parse action File for mapping of O2DPG task name to PID +# ---> fills pid_to_task + task_to_pid + +# Define the pattern using regular expressions +pid_to_O2DPGtask = {} +O2DPGtask_to_pid = {} + +pattern = re.compile(r'.*INFO Task (\d+).*:(\w+) finished with status 0') +# Open the action file and process each line +with open(args.actionFile, 'r') as file: + for line in file: + # Try to match the pattern in each line + match = pattern.match(line) + + # If a match is found, extract the information + if match: + task_number = match.group(1) + task_name = match.group(2) + + pid_to_O2DPGtask[task_number] = task_name + O2DPGtask_to_pid[task_name] = task_number + + +# (b) - parse monitor file for mapping from files to processes and operation +# ---> fills the following structures: +task_reads = { tname : set() for tname in O2DPGtask_to_pid } +task_writes = { tname : set() for tname in O2DPGtask_to_pid } +file_written_task = {} +file_consumed_task = {} + +pattern = re.compile(args.basedir + r'([^,]+),((?:read|write)),(.*)') +# neglecting some framework file names +file_exclude_filter = re.compile(r'(.*)\.log(.*)|(ccdb/log)|(.*)dpl-config\.json') + +# construct user-filter regular expressions +file_filter_re = [ re.compile(l) for l in args.file_filters ] + +with open(args.monitorFile, 'r') as file: + for line in file: + # Try to match the pattern in each line + match = pattern.match(line) + if match: + file_name = match.group(1) + mode = match.group(2) + pids = match.group(3).split(";") + + # implement file name filter + if file_exclude_filter.match(file_name): + continue + + # look if file matches one of the user provided filters + file_matches = False + for r in file_filter_re: + if r.match(file_name): + file_matches = True + break + + 
if not file_matches: + continue + + if file_consumed_task.get(file_name) == None: + file_consumed_task[file_name] = set() + if file_written_task.get(file_name) == None: + file_written_task[file_name] = set() + + for p in pids: + if p in pid_to_O2DPGtask: + task = pid_to_O2DPGtask.get(p) + if mode == 'read': + task_reads.get(task).add(file_name) + file_consumed_task[file_name].add(task) + + if mode == 'write': + task_writes.get(task).add(file_name) + file_written_task[file_name].add(task) + + +# draws the graph of files and tasks +def draw_graph(graphviz_filename): + if not havegraphviz: + print('graphviz not installed, cannot draw workflow') + return + + dot = Digraph(comment='O2DPG file - task network') + + ccdbfilter = re.compile('ccdb(.*)/snapshot.root') + + nametoindex={} + index=0 + + allfiles = set(file_written_task.keys()) | set(file_consumed_task.keys()) + normalfiles = [ s for s in allfiles if not ccdbfilter.match(s) ] + ccdbfiles = [ (s, ccdbfilter.match(s).group(1)) for s in allfiles if ccdbfilter.match(s) ] + + with dot.subgraph(name='CCDB') as ccdbpartition: + ccdbpartition.attr(color = 'blue') + for f in ccdbfiles: + nametoindex[f[0]] = index + ccdbpartition.node(str(index), f[1], color = 'blue') + index = index + 1 + + with dot.subgraph(name='normal') as normalpartition: + normalpartition.attr(color = 'black') + for f in normalfiles: + nametoindex[f] = index + normalpartition.node(str(index), f, color = 'red') + index = index + 1 + for t in O2DPGtask_to_pid: + nametoindex[t] = index + normalpartition.node(str(index), t, shape = 'box', color = 'green', style = 'filled' ) + index = index + 1 + + # edges (arrows between files and tasks) + for node in file_consumed_task: + # node is a file (source) + sourceindex = nametoindex[node] + for task in file_consumed_task[node]: + toindex = nametoindex[task] + dot.edge(str(sourceindex), str(toindex)) + + # edges (arrows between files and tasks) + for node in file_written_task: + # node is a file (target) + toindex = nametoindex[node] + for task in file_written_task[node]: + sourceindex = nametoindex[task] + dot.edge(str(sourceindex), str(toindex)) + + dot.render(graphviz_filename, format='pdf') + dot.render(graphviz_filename, format='gv') + +def write_json_report(json_file_name): + # produce a JSON report of file dependencies + all_filenames = set(file_written_task.keys()) | set(file_consumed_task.keys()) + file_written_task_tr = [ + { + "file" : k, + "written_by" : list(file_written_task.get(k, [])), + "read_by" : list(file_consumed_task.get(k, [])) + } + for k in all_filenames + ] + + tasks_output = [ + { + "task" : t, + "writes" : list(task_writes.get(t,[])), + "reads" : list(task_reads.get(t,[])) + } + for t in O2DPGtask_to_pid + ] + + # Write the dictionary to a JSON file + with open(json_file_name, 'w') as json_file: + json.dump({ "file_report" : file_written_task_tr, "task_report" : tasks_output }, json_file, indent=2) + +if args.graphviz: + draw_graph(args.graphviz) + +if args.output: + write_json_report(args.output) \ No newline at end of file diff --git a/UTILS/FileIOGraph/monitor_fileaccess.cpp b/UTILS/FileIOGraph/monitor_fileaccess.cpp new file mode 100644 index 000000000..50567e593 --- /dev/null +++ b/UTILS/FileIOGraph/monitor_fileaccess.cpp @@ -0,0 +1,188 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#define CHK(expr, errcode) \ + if ((expr) == errcode) \ + perror(#expr), exit(EXIT_FAILURE) + +#include +#include +#include +#include +#include +#include + +#define MAXBUF (BUFSIZ * 2) + 
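// Headers required by the code below (inferred from the calls used; the exact original
// include list is an assumption): <fcntl.h>, <limits.h>, <stdio.h>, <stdlib.h>, <string.h>,
// <unistd.h> and <sys/fanotify.h> for fanotify_init/fanotify_mark, readlink and the /proc
// parsing, plus <unordered_map>, <string>, <sstream> and <iostream> for the PID caches,
// the parent-chain strings and the std::cerr diagnostics.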
+int getppid(int pid) +{ + int ppid; + char buf[MAXBUF]; + char procname[32]; // Holds /proc/4294967296/status\0 + FILE* fp; + + snprintf(procname, sizeof(procname), "/proc/%u/status", pid); + fp = fopen(procname, "r"); + if (fp != NULL) { + size_t ret = fread(buf, sizeof(char), MAXBUF - 1, fp); + if (!ret) { + return 0; + } else { + buf[ret++] = '\0'; // Terminate it. + } + fclose(fp); + } + char* ppid_loc = strstr(buf, "\nPPid:"); + if (ppid_loc) { + int ret = sscanf(ppid_loc, "\nPPid:%d", &ppid); + if (!ret || ret == EOF) { + return 0; + } + return ppid; + } else { + return 0; + } +} + +std::string getcmd(pid_t pid) +{ + char path[1024]; + snprintf(path, sizeof(path), "/proc/%d/cmdline", pid); + if (pid == 0 || pid == 1) { + return std::string(""); + } + + FILE* file = fopen(path, "r"); + if (file) { + char buffer[1024]; // max 1024 chars + size_t bytesRead = fread(buffer, 1, sizeof(buffer), file); + fclose(file); + for (int byte = 0; byte < bytesRead; ++byte) { + if (buffer[byte] == '\0') { + buffer[byte] == '@'; + } + } + return std::string(buffer); + } + return std::string(""); +} + +std::unordered_map good_pid; + +bool is_good_pid(int pid, int maxparent) +{ + auto iter = good_pid.find(pid); + if (iter != good_pid.end()) { + // the result is known + return iter->second; + } + // the result is not known ---> determine it + + // this means determining the whole chain of parent ids + if (pid == maxparent) { + good_pid[pid] = true; + } else if (pid == 0) { + good_pid[pid] = false; + } else { + good_pid[pid] = is_good_pid(getppid(pid), maxparent); + } + return good_pid[pid]; +} + +int main(int argc, char** argv) +{ + int fan; + char buf[4096]; + char fdpath[32]; + char path[PATH_MAX + 1]; + ssize_t buflen, linklen; + struct fanotify_event_metadata* metadata; + + CHK(fan = fanotify_init(FAN_CLASS_NOTIF, O_RDONLY), -1); + CHK(fanotify_mark(fan, FAN_MARK_ADD | FAN_MARK_MOUNT, + FAN_CLOSE_WRITE | FAN_CLOSE_NOWRITE | FAN_EVENT_ON_CHILD, AT_FDCWD, "/"), + -1); + + std::unordered_map pid_to_parents; // mapping of a process id to the whole string of parent pids, separated by ';' + std::unordered_map pid_to_command; // mapping of a process id to a command + + auto MAX_MOTHER_PID_ENV = getenv("MAXMOTHERPID"); + int max_mother_pid = 1; // everything + if (MAX_MOTHER_PID_ENV != nullptr) { + std::cerr << "found env variablen"; + max_mother_pid = std::atoi(MAX_MOTHER_PID_ENV); + std::cerr << "Setting topmost mother process to " << max_mother_pid << "\n"; + } else { + std::cerr << "No environment given\n"; + } + + auto thispid = getpid(); + std::string* parentspid = nullptr; + + for (;;) { + CHK(buflen = read(fan, buf, sizeof(buf)), -1); + metadata = (struct fanotify_event_metadata*)&buf; + while (FAN_EVENT_OK(metadata, buflen)) { + if (metadata->mask & FAN_Q_OVERFLOW) { + printf("Queue overflow!\n"); + continue; + } + sprintf(fdpath, "/proc/self/fd/%d", metadata->fd); + CHK(linklen = readlink(fdpath, path, sizeof(path) - 1), -1); + path[linklen] = '\0'; + auto pid = metadata->pid; + + bool record = true; + + // no need to monitor ourselfs + record = record && pid != thispid; + + // check if we have the right events before continuing + record = record && (((metadata->mask & FAN_CLOSE_WRITE) || (metadata->mask & FAN_CLOSE_NOWRITE))); + + // check if we have the right pid before continuing + record = record && is_good_pid(pid, max_mother_pid); + + if (record) { + auto iter = pid_to_parents.find((int)pid); + if (iter != pid_to_parents.end()) { + parentspid = &iter->second; + } else { + std::stringstream 
str; + // get chain of parent pids + auto current = (int)pid; + str << current; + while (current != max_mother_pid && current != 0) { + // record command line of current if not already cached + if (pid_to_command.find((int)current) == pid_to_command.end()) { + std::string cmd{getcmd(current)}; + pid_to_command[current] = cmd; + printf("pid-to-command:%i:%s\n", current, cmd.c_str()); + } + + auto next = getppid(current); + current = next; + str << ";" << current; + } + pid_to_parents[(int)pid] = str.str(); + parentspid = &pid_to_parents[(int)pid]; + } + + if (metadata->mask & FAN_CLOSE_WRITE) { + printf("%s,write,%s\n", path, parentspid->c_str()); + } + if (metadata->mask & FAN_CLOSE_NOWRITE) { + printf("%s,read,%s\n", path, parentspid->c_str()); + } + } + + close(metadata->fd); + metadata = FAN_EVENT_NEXT(metadata, buflen); + } + } +} From ac9549fd177e22821f0aca18ba6d5d7870e87199 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Thu, 8 Feb 2024 16:35:12 +0100 Subject: [PATCH 026/101] Make reading patterns from file more resilient (#1450) Co-authored-by: Benedikt Volkel --- RelVal/utils/o2dpg_release_validation_utils.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/RelVal/utils/o2dpg_release_validation_utils.py b/RelVal/utils/o2dpg_release_validation_utils.py index 633e424c6..ca1d786f5 100755 --- a/RelVal/utils/o2dpg_release_validation_utils.py +++ b/RelVal/utils/o2dpg_release_validation_utils.py @@ -280,8 +280,15 @@ def set_object_name_patterns(self, include_patterns, exclude_patterns): def load_this_patterns(patterns): if not patterns or not patterns[0].startswith("@"): return patterns + + patterns_from_file = [] with open(patterns[0][1:], "r") as f: - return f.read().splitlines() + for line in f: + line = line.strip() + if not line: + continue + patterns_from_file.append(line) + return patterns_from_file self.include_patterns = load_this_patterns(include_patterns) self.exclude_patterns = load_this_patterns(exclude_patterns) From a742df325715628593f7648ba7dfa14a4b5daa38 Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Fri, 9 Feb 2024 16:20:19 +0100 Subject: [PATCH 027/101] Update TRD QC json files (#1448) --- DATA/production/qc-async/trd.json | 10 ++-------- DATA/production/qc-sync/trd.json | 19 +++++++++++++------ 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/DATA/production/qc-async/trd.json b/DATA/production/qc-async/trd.json index d32e1fbfd..3d9633ea1 100644 --- a/DATA/production/qc-async/trd.json +++ b/DATA/production/qc-async/trd.json @@ -4,22 +4,16 @@ "database": { "implementation": "CCDB", "host": "ccdb-test.cern.ch:8080", - "username": "not_applicable", - "password": "not_applicable", - "name": "not_applicable" }, "Activity": { "type": "2", "number": "42" }, "monitoring": { - "url": "influxdb-unix:///tmp/telegraf.sock" - }, - "consul": { - "url": "alio2-cr1-hv-aliecs:8500" + "url": "infologger:///debug?qc" }, "conditionDB": { - "url": "alio2-cr1-hv-qcdb1.cern.ch:8083" + "url": "ccdb-test.cern.ch:8080" } }, "tasks": { diff --git a/DATA/production/qc-sync/trd.json b/DATA/production/qc-sync/trd.json index a47867470..4895e8c85 100644 --- a/DATA/production/qc-sync/trd.json +++ b/DATA/production/qc-sync/trd.json @@ -22,7 +22,6 @@ "url": "o2-ccdb.internal" }, "infologger": { - "": "Message at this level or above are discarded (default: 21 - Trace)", "filterDiscardDebug": "false", "filterDiscardLevel": "11" } @@ -33,11 +32,15 @@ "className": "o2::quality_control_modules::trd::RawData", "moduleName": "QcTRD", "detectorName": "TRD", - 
"cycleDurationSeconds": "60", + "cycleDurations": [ + {"cycleDurationSeconds": 30, "validitySeconds": 180}, + {"cycleDurationSeconds": 180, "validitySeconds": 1} + ], "dataSource": { "type": "direct", "query": "rawstats:TRD/RAWSTATS" }, + "disableLastCycle": "true", "location": "local", "localMachines": [ "epn", @@ -54,11 +57,12 @@ "className": "o2::quality_control_modules::trd::DigitsTask", "moduleName": "QcTRD", "detectorName": "TRD", - "cycleDurationSeconds": "60", + "cycleDurationSeconds": "180", "dataSource": { "type": "dataSamplingPolicy", "name": "trdall" }, + "disableLastCycle": "true", "location": "local", "localMachines": [ "epn", @@ -79,11 +83,12 @@ "className": "o2::quality_control_modules::trd::TrackletsTask", "moduleName": "QcTRD", "detectorName": "TRD", - "cycleDurationSeconds": "60", + "cycleDurationSeconds": "120", "dataSource": { "type": "dataSamplingPolicy", "name": "trdall" }, + "disableLastCycle": "true", "location": "local", "localMachines": [ "epn", @@ -105,11 +110,12 @@ "className": "o2::quality_control_modules::trd::PulseHeightTrackMatch", "moduleName": "QcTRD", "detectorName": "TRD", - "cycleDurationSeconds": "60", + "cycleDurationSeconds": "180", "dataSource": { "type": "direct", "query": "phValues:TRD/PULSEHEIGHT" }, + "disableLastCycle": "true", "location": "local", "localMachines": [ "epn", @@ -129,11 +135,12 @@ "className": "o2::quality_control_modules::trd::TrackingTask", "moduleName": "QcTRD", "detectorName": "TRD", - "cycleDurationSeconds": "60", + "cycleDurationSeconds": "180", "dataSource": { "type": "direct", "query": "trackITSTPCTRD:TRD/MATCH_ITSTPC;trigITSTPCTRD:TRD/TRGREC_ITSTPC" }, + "disableLastCycle": "true", "location": "local", "localMachines": [ "epn", From d17fdfb923737857897bfca4d069183ffa969fc9 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Fri, 9 Feb 2024 17:14:37 +0100 Subject: [PATCH 028/101] Fixes to make the split wf work again (#1451) --- DATA/common/gen_topo_helper_functions.sh | 12 ++++- DATA/common/setenv.sh | 12 +++++ DATA/common/setenv_calib.sh | 7 +++ .../configurations/asyncReco/async_pass.sh | 15 ++++-- .../configurations/asyncReco/setenv_extra.sh | 51 +++++++++---------- 5 files changed, 66 insertions(+), 31 deletions(-) diff --git a/DATA/common/gen_topo_helper_functions.sh b/DATA/common/gen_topo_helper_functions.sh index 6a3e84149..00fc1f118 100755 --- a/DATA/common/gen_topo_helper_functions.sh +++ b/DATA/common/gen_topo_helper_functions.sh @@ -9,9 +9,19 @@ has_detector() [[ $WORKFLOW_DETECTORS =~ (^|,)"$1"(,|$) ]] } +has_detector_from_global_reader_clusters() +{ + [[ $WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS =~ (^|,)"$1"(,|$) ]] +} + +has_detector_from_global_reader_tracks() +{ + [[ $WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS =~ (^|,)"$1"(,|$) ]] +} + has_detector_from_global_reader() { - [[ $WORKFLOW_DETECTORS_USE_GLOBAL_READER =~ (^|,)"$1"(,|$) ]] + has_detector_from_global_reader_tracks $1 || has_detector_from_global_reader_clusters $1 } has_detector_calib() diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index 6c31539c9..f7342e553 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -49,6 +49,8 @@ if [[ -z "${WORKFLOW_DETECTORS_RECO+x}" ]] || [[ "0$WORKFLOW_DETECTORS_RECO" == if [[ -z "${WORKFLOW_DETECTORS_CTF+x}" ]] || [[ "0$WORKFLOW_DETECTORS_CTF" == "0ALL" ]]; then export WORKFLOW_DETECTORS_CTF=$WORKFLOW_DETECTORS; fi if [[ "0${WORKFLOW_DETECTORS_FLP_PROCESSING:-}" == "0ALL" ]]; then export WORKFLOW_DETECTORS_FLP_PROCESSING=$WORKFLOW_DETECTORS; fi if [[ 
"0${WORKFLOW_DETECTORS_USE_GLOBAL_READER:-}" == "0ALL" ]]; then export WORKFLOW_DETECTORS_USE_GLOBAL_READER=$WORKFLOW_DETECTORS; else export WORKFLOW_DETECTORS_USE_GLOBAL_READER=${WORKFLOW_DETECTORS_USE_GLOBAL_READER:-}; fi +if [[ "0${WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS:-}" == "0ALL" ]]; then export WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=$WORKFLOW_DETECTORS; else export WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=${WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS:-}; fi +if [[ "0${WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS:-}" == "0ALL" ]]; then export WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=$WORKFLOW_DETECTORS; else export WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=${WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS:-}; fi if [[ -z "${WORKFLOW_PARAMETERS:-}" ]]; then export WORKFLOW_PARAMETERS=; fi if [[ ! -z ${WORKFLOW_DETECTORS_EXCLUDE_QC:-} ]]; then @@ -61,6 +63,16 @@ if [[ ! -z ${WORKFLOW_DETECTORS_EXCLUDE_CALIB:-} ]]; then export WORKFLOW_DETECTORS_CALIB=$(echo $WORKFLOW_DETECTORS_CALIB | sed -e "s/,$i,/,/g" -e "s/^$i,//" -e "s/,$i"'$'"//" -e "s/^$i"'$'"//") done fi +if [[ ! -z ${WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS:-} ]]; then + for i in ${WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS//,/ }; do + export WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=$(echo $WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS | sed -e "s/,$i,/,/g" -e "s/^$i,//" -e "s/,$i"'$'"//" -e "s/^$i"'$'"//") + done +fi +if [[ ! -z ${WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_CLUSTERS:-} ]]; then + for i in ${WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_CLUSTERS//,/ }; do + export WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=$(echo $WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS | sed -e "s/,$i,/,/g" -e "s/^$i,//" -e "s/,$i"'$'"//" -e "s/^$i"'$'"//") + done +fi if [[ -z "${TFLOOP:-}" ]]; then export TFLOOP=0; fi # loop over timeframes if [[ -z "${NTIMEFRAMES:-}" ]]; then export NTIMEFRAMES=-1; fi # max number of time frames to process, <=0 : unlimited diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index 87fe461ea..a1d2e7692 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -30,6 +30,8 @@ if has_detector_calib FDD && has_processing_step FDD_RECO; then CAN_DO_CALIB_FDD if has_detector_calib ZDC && has_processing_step ZDC_RECO; then CAN_DO_CALIB_ZDC_TDC=1; else CAN_DO_CALIB_ZDC_TDC=0; fi # for async recalibration if has_detector_calib EMC && has_detector_reco EMC && [[ $SYNCMODE != 1 ]]; then CAN_DO_CALIB_EMC_ASYNC_RECALIB=1; else CAN_DO_CALIB_EMC_ASYNC_RECALIB=0; fi +if [[ $SYNCMODE != 1 ]] && has_detector_reco TPC; then CAN_DO_CALIB_ASYNC_EXTRACTTPCCURRENTS=1; else CAN_DO_CALIB_ASYNC_EXTRACTTPCCURRENTS=0; fi +if [[ $SYNCMODE != 1 ]] && has_detector_reco TPC && has_detector_reco ITS && has_detector_reco FT0; then CAN_DO_CALIB_ASYNC_EXTRACTTIMESERIES=1; else CAN_DO_CALIB_ASYNC_EXTRACTTIMESERIES=0; fi # additional individual settings for calibration workflows if has_detector CTP; then export CALIB_TPC_SCDCALIB_CTP_INPUT="--enable-ctp"; else export CALIB_TPC_SCDCALIB_CTP_INPUT=""; fi @@ -174,6 +176,11 @@ fi ( [[ -z ${CALIB_ZDC_TDC:-} ]] || [[ $CAN_DO_CALIB_ZDC_TDC == 0 ]] ) && CALIB_ZDC_TDC=0 # for async: ( [[ -z ${CALIB_EMC_ASYNC_RECALIB:-} ]] || [[ $CAN_DO_CALIB_EMC_ASYNC_RECALIB == 0 ]] ) && CALIB_EMC_ASYNC_RECALIB=0 +( [[ -z ${CALIB_ASYNC_EXTRACTTPCCURRENTS:-} ]] || [[ $CAN_DO_CALIB_ASYNC_EXTRACTTPCCURRENTS == 0 ]] ) && CALIB_ASYNC_EXTRACTTPCCURRENTS=0 +( [[ -z ${CALIB_ASYNC_DISABLE3DCURRENTS:-} ]] || [[ $CAN_DO_CALIB_ASYNC_DISABLE3DCURRENTS == 0 ]] 
) && CALIB_ASYNC_DISABLE3DCURRENTS=0 +: ${ON_SKIMMED_DATA:=0} +( [[ -z ${CALIB_ASYNC_EXTRACTTIMESERIES:-} ]] || [[ $CAN_DO_CALIB_ASYNC_EXTRACTTIMESERIES == 0 ]] ) && CALIB_ASYNC_EXTRACTTIMESERIES=0 + if [[ "0${GEN_TOPO_VERBOSE:-}" == "01" ]]; then echo "CALIB_PRIMVTX_MEANVTX = $CALIB_PRIMVTX_MEANVTX" 1>&2 diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index c65ac7b49..05ed0dea2 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -149,6 +149,14 @@ if [[ -n "$ALIEN_JDL_DOMEANVTXCALIB" ]]; then export ADD_CALIB=1 fi +if [[ "$ALIEN_JDL_EXTRACTCURRENTS" == "1" ]]; then + export ADD_CALIB=1 +fi + +if [[ "$ALIEN_JDL_EXTRACTTIMESERIES" == "1" ]]; then + export ADD_CALIB=1 +fi + # AOD file size if [[ -n "$ALIEN_JDL_AODFILESIZE" ]]; then export AOD_FILE_SIZE="$ALIEN_JDL_AODFILESIZE" @@ -577,18 +585,19 @@ else if ([[ -z "$ALIEN_JDL_SSPLITSTEP" ]] && [[ -z "$ALIEN_JDL_SSPLITSTEP" ]]) || [[ "$ALIEN_JDL_SSPLITSTEP" -eq 3 ]] || ( [[ -n $ALIEN_JDL_STARTSPLITSTEP ]] && [[ "$ALIEN_JDL_STARTSPLITSTEP" -le 3 ]]) || [[ "$ALIEN_JDL_SSPLITSTEP" -eq "all" ]]; then # 3. matching, QC, calib, AOD WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS_START + echo "WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS" echo "Step 3) matching, QC, calib, AOD" echo -e "\nStep 3) matching, QC, calib, AOD" >> workflowconfig.log export TIMEFRAME_RATE_LIMIT=0 echo "Removing detectors $DETECTORS_EXCLUDE" READER_DELAY=${ALIEN_JDL_READERDELAY:-30} export ARGS_EXTRA_PROCESS_o2_global_track_cluster_reader+=" --reader-delay $READER_DELAY " - echo "extra args are $ARGS_EXTRA_PROCESS_o2_global_track_cluster_reader_workflow" - env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=print TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_USE_GLOBAL_READER=ALL WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list >> workflowconfig.log + echo "extra args are $ARGS_EXTRA_PROCESS_o2_global_track_cluster_reader" + env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=print TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=ALL WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=ALL WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS=HMP WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list >> workflowconfig.log # run it if [[ "0$RUN_WORKFLOW" != "00" ]]; then timeStart=`date +%s` - time env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=run TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_USE_GLOBAL_READER=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list + time env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=run TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=ALL WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=ALL WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS=HMP WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list exitcode=$? 
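  # exit status of the full step-3 workflow launched above (matching, QC, calib, AOD,
  # reading tracks/clusters back via the global readers)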
timeEnd=`date +%s` timeUsed=$(( $timeUsed+$timeEnd-$timeStart )) diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index a34d8ffce..1036128d6 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -553,6 +553,10 @@ if [[ $ADD_CALIB == "1" ]]; then export CALIB_ZDC_TDC=0 export CALIB_FT0_TIMEOFFSET=0 export CALIB_TPC_SCDCALIB=0 + export CALIB_FT0_INTEGRATEDCURR=0 + export CALIB_FV0_INTEGRATEDCURR=0 + export CALIB_FDD_INTEGRATEDCURR=0 + export CALIB_TOF_INTEGRATEDCURR=0 if [[ $DO_TPC_RESIDUAL_EXTRACTION == "1" ]]; then export CALIB_TPC_SCDCALIB=1 export CALIB_TPC_SCDCALIB_SENDTRKDATA=1 @@ -582,38 +586,31 @@ if [[ $ADD_CALIB == "1" ]]; then export ARGS_EXTRA_PROCESS_o2_calibration_trd_workflow="$ARGS_EXTRA_PROCESS_o2_calibration_trd_workflow --enable-root-output" export CALIB_TRD_GAIN=1 fi - if [[ $ALIEN_JDL_DOUPLOADSLOCALLY == 1 ]]; then - export CCDB_POPULATOR_UPLOAD_PATH="file://$PWD" + # extra workflows in case we want to process the currents for FT0, FV0, TOF, TPC + if [[ -n $ALIEN_JDL_EXTRACTCURRENTS ]] ; then + export CALIB_FT0_INTEGRATEDCURR=$ALIEN_JDL_EXTRACTCURRENTS + export CALIB_FV0_INTEGRATEDCURR=$ALIEN_JDL_EXTRACTCURRENTS + export CALIB_FDD_INTEGRATEDCURR=$ALIEN_JDL_EXTRACTCURRENTS + export CALIB_TOF_INTEGRATEDCURR=$ALIEN_JDL_EXTRACTCURRENTS + export CALIB_ASYNC_EXTRACTTPCCURRENTS=$ALIEN_JDL_EXTRACTCURRENTS fi -fi - -# extra workflows in case we want to process the currents for FT0, FV0, TOF, TPC -if [[ $ALIEN_JDL_EXTRACTCURRENTS == 1 ]]; then - if [[ -z "${WORKFLOW_DETECTORS_RECO+x}" ]] || [[ "0$WORKFLOW_DETECTORS_RECO" == "0ALL" ]]; then export WORKFLOW_DETECTORS_RECO=$WORKFLOW_DETECTORS; fi - has_detector_reco FT0 && add_comma_separated ADD_EXTRA_WORKFLOW "o2-ft0-integrate-cluster-workflow" - has_detector_reco FV0 && add_comma_separated ADD_EXTRA_WORKFLOW "o2-fv0-integrate-cluster-workflow" - has_detector_reco TOF && add_comma_separated ADD_EXTRA_WORKFLOW "o2-tof-integrate-cluster-workflow" - if [[ $ALIEN_JDL_DISABLE3DCURRENTS != 1 ]]; then - export ARGS_EXTRA_PROCESS_o2_tpc_integrate_cluster_workflow="$ARGS_EXTRA_PROCESS_o2_tpc_integrate_cluster_workflow--process-3D-currents --nSlicesTF 1" + if [[ -n $ALIEN_JDL_DISABLE3DCURRENTS ]]; then + export CALIB_ASYNC_DISABLE3DCURRENTS=$ALIEN_JDL_DISABLE3DCURRENTS fi - has_detector_reco TPC && add_comma_separated ADD_EXTRA_WORKFLOW "o2-tpc-integrate-cluster-workflow" -fi -# extra workflows in case we want to process the currents for time series -if [[ $ALIEN_JDL_EXTRACTTIMESERIES == 1 ]]; then - if [[ -z "${WORKFLOW_DETECTORS_RECO+x}" ]] || [[ "0$WORKFLOW_DETECTORS_RECO" == "0ALL" ]]; then export WORKFLOW_DETECTORS_RECO=$WORKFLOW_DETECTORS; fi - has_detector_reco TPC && has_detector_reco ITS && has_detector_reco FT0 && add_comma_separated ADD_EXTRA_WORKFLOW "o2-tpc-time-series-workflow" - if [[ ! -z "$ALIEN_JDL_ENABLEUNBINNEDTIMESERIES" ]]; then - export ARGS_EXTRA_PROCESS_o2_tpc_time_series_workflow="$ARGS_EXTRA_PROCESS_o2_tpc_time_series_workflow --enable-unbinned-root-output --sample-unbinned-tsallis --threads 1" - fi - if [[ $ON_SKIMMED_DATA == 1 ]] || [[ ! 
-z "$ALIEN_JDL_SAMPLINGFACTORTIMESERIES" ]] ; then - if [[ $ON_SKIMMED_DATA == 1 ]] ; then - SAMPLINGFACTORTIMESERIES=0.1 + # extra workflows in case we want to process the currents for time series + if [[ -n $ALIEN_JDL_EXTRACTTIMESERIES ]] ; then + echo "Adding timeseries in setenv_extra.sh" + export CALIB_ASYNC_EXTRACTTIMESERIES=$ALIEN_JDL_EXTRACTTIMESERIES + if [[ -n $ALIEN_JDL_ENABLEUNBINNEDTIMESERIES ]]; then + export CALIB_ASYNC_ENABLEUNBINNEDTIMESERIES=$ALIEN_JDL_ENABLEUNBINNEDTIMESERIES fi - if [[ ! -z "$ALIEN_JDL_SAMPLINGFACTORTIMESERIES" ]]; then # this takes priority - export SAMPLINGFACTORTIMESERIES=${ALIEN_JDL_SAMPLINGFACTORTIMESERIES} + if [[ -n $ALIEN_JDL_SAMPLINGFACTORTIMESERIES ]]; then + export CALIB_ASYNC_SAMPLINGFACTORTIMESERIES=$ALIEN_JDL_SAMPLINGFACTORTIMESERIES fi - export ARGS_EXTRA_PROCESS_o2_tpc_time_series_workflow="$ARGS_EXTRA_PROCESS_o2_tpc_time_series_workflow --sampling-factor ${SAMPLINGFACTORTIMESERIES}" + fi + if [[ $ALIEN_JDL_DOUPLOADSLOCALLY == 1 ]]; then + export CCDB_POPULATOR_UPLOAD_PATH="file://$PWD" fi fi From a58581c6a8e25342fac65265cba6801ca2530b42 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Sun, 11 Feb 2024 19:23:37 +0100 Subject: [PATCH 029/101] Fix typo (#1453) --- DATA/production/qc-async/trd.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DATA/production/qc-async/trd.json b/DATA/production/qc-async/trd.json index 3d9633ea1..967c5b85e 100644 --- a/DATA/production/qc-async/trd.json +++ b/DATA/production/qc-async/trd.json @@ -3,7 +3,7 @@ "config": { "database": { "implementation": "CCDB", - "host": "ccdb-test.cern.ch:8080", + "host": "ccdb-test.cern.ch:8080" }, "Activity": { "type": "2", From e17e279d11ddaeb3cf9f5b944ebe98b39351a895 Mon Sep 17 00:00:00 2001 From: Timo Wilken Date: Mon, 12 Feb 2024 10:21:44 +0000 Subject: [PATCH 030/101] Check JSON syntax for pull requests (#1454) --- .github/workflows/check-json-syntax.yml | 37 +++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 .github/workflows/check-json-syntax.yml diff --git a/.github/workflows/check-json-syntax.yml b/.github/workflows/check-json-syntax.yml new file mode 100644 index 000000000..fae51ae62 --- /dev/null +++ b/.github/workflows/check-json-syntax.yml @@ -0,0 +1,37 @@ +--- +name: Validate JSON syntax + +# Run on any commit or PR that changes any JSON file. +'on': + push: + paths: + - '**.json' + pull_request: + paths: + - '**.json' + +permissions: {} + +jobs: + json-syntax: + name: validate syntax + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Validate syntax for JSON files + run: | + error=0 + readarray -d '' json_files < \ + <(find . \( -path ./.git -or -path ./DATA/testing/private \) -prune -false -or -type f -name '*.json' -print0) + for jsonf in "${json_files[@]}"; do + echo "::debug::Checking $jsonf..." + if ! errmsg=$(jq . 
"$jsonf" 2>&1 >/dev/null); then + error=1 + echo "Invalid JSON syntax found in $jsonf:" >&2 + printf '::error file=%s,title=%s::%s\n' "$jsonf" 'Invalid JSON syntax' "$errmsg" + fi + done + exit "$error" From f99e0fbaaaa15fc46c8431ea5af2366812012655 Mon Sep 17 00:00:00 2001 From: Sandro Wenzel Date: Mon, 12 Feb 2024 14:12:11 +0100 Subject: [PATCH 031/101] Anchoring: Ability to get detector list from GRPECS (#1452) --- MC/bin/o2dpg_sim_workflow.py | 1 + MC/bin/o2dpg_sim_workflow_anchored.py | 14 ++++++++++---- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index 5f116bab5..aaf9f640a 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -59,6 +59,7 @@ parser.add_argument('-trigger',help='event selection: particle, external', default='') parser.add_argument('-ini',help='generator init parameters file (full paths required), for example: ${O2DPG_ROOT}/MC/config/PWGHF/ini/GeneratorHF.ini', default='') parser.add_argument('-confKey',help='generator or trigger configuration key values, for example: "GeneratorPythia8.config=pythia8.cfg;A.x=y"', default='') +parser.add_argument('--readoutDets',help='comma separated string of detectors readout (does not modify material budget - only hit creation)', default='all') parser.add_argument('-interactionRate',help='Interaction rate, used in digitization', default=-1) parser.add_argument('-bcPatternFile',help='Bunch crossing pattern file, used in digitization (a file name or "ccdb")', default='') diff --git a/MC/bin/o2dpg_sim_workflow_anchored.py b/MC/bin/o2dpg_sim_workflow_anchored.py index 1876e4877..a50a0532a 100755 --- a/MC/bin/o2dpg_sim_workflow_anchored.py +++ b/MC/bin/o2dpg_sim_workflow_anchored.py @@ -121,7 +121,7 @@ def retrieve_CCDBObject_asJSON(ccdbreader, path, timestamp, objtype_external = N jsonTString = TBufferJSON.ConvertToJSON(obj, TClass.GetClass(objtype)) return json.loads(jsonTString.Data()) -def retrieve_sor_eor_fromGRPECS(ccdbreader, run_number, rct = None): +def retrieve_params_fromGRPECS(ccdbreader, run_number, rct = None): """ Retrieves start of run (sor), end of run (eor) and other global parameters from the GRPECS object, given a run number. 
We first need to find the right object @@ -191,8 +191,13 @@ def retrieve_sor_eor_fromGRPECS(ccdbreader, run_number, rct = None): print ("OrbitFirst", orbitFirst) # first orbit of this run print ("LastOrbit of run", orbitLast) + # Now fetch the detector list + print ("DetsReadout-Mask: ", grp["mDetsReadout"]['v']) + detList = o2.detectors.DetID.getNames(grp["mDetsReadout"]['v']) + print ("Detector list is ", detList) + # orbitReset.get(run_number) - return {"SOR": SOR, "EOR": EOR, "FirstOrbit" : orbitFirst, "LastOrbit" : orbitLast, "OrbitsPerTF" : int(grp["mNHBFPerTF"])} + return {"SOR": SOR, "EOR": EOR, "FirstOrbit" : orbitFirst, "LastOrbit" : orbitLast, "OrbitsPerTF" : int(grp["mNHBFPerTF"]), "detList" : detList} def retrieve_GRP(ccdbreader, timestamp): """ @@ -335,7 +340,7 @@ def main(): ccdbreader = CCDBAccessor(args.ccdb_url) # fetch the EOR/SOR rct_sor_eor = retrieve_sor_eor(ccdbreader, args.run_number) # <-- from RCT/Info - GLOparams = retrieve_sor_eor_fromGRPECS(ccdbreader, args.run_number, rct=rct_sor_eor) + GLOparams = retrieve_params_fromGRPECS(ccdbreader, args.run_number, rct=rct_sor_eor) if not GLOparams: print ("No time info found") sys.exit(1) @@ -419,7 +424,8 @@ def main(): # we finally pass forward to the unanchored MC workflow creation # TODO: this needs to be done in a pythonic way clearly - forwardargs += " -tf " + str(args.tf) + " --sor " + str(sor) + " --timestamp " + str(timestamp) + " --production-offset " + str(prod_offset) + " -run " + str(args.run_number) + " --run-anchored --first-orbit " + str(first_orbit) + " -field ccdb -bcPatternFile ccdb" + " --orbitsPerTF " + str(GLOparams["OrbitsPerTF"]) + " -col " + str(ColSystem) + " -eCM " + str(eCM) + forwardargs += " -tf " + str(args.tf) + " --sor " + str(sor) + " --timestamp " + str(timestamp) + " --production-offset " + str(prod_offset) + " -run " + str(args.run_number) + " --run-anchored --first-orbit " \ + + str(first_orbit) + " -field ccdb -bcPatternFile ccdb" + " --orbitsPerTF " + str(GLOparams["OrbitsPerTF"]) + " -col " + str(ColSystem) + " -eCM " + str(eCM) + ' --readoutDets ' + GLOparams['detList'] print ("forward args ", forwardargs) cmd = "${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py " + forwardargs print ("Creating time-anchored workflow...") From ce628a667b644f5e9cc00a0616f4677c34d88770 Mon Sep 17 00:00:00 2001 From: shahoian Date: Tue, 13 Feb 2024 11:18:06 +0100 Subject: [PATCH 032/101] Restore cluster syst errors for PbPb only --- DATA/production/configurations/asyncReco/setenv_extra.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index 1036128d6..bc65f87fb 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -384,7 +384,10 @@ elif [[ $ALIGNLEVEL == 1 ]]; then export TPC_CORR_SCALING+=" --enable-M-shape-correction " fi - if [[ $ALIEN_JDL_LPMANCHORYEAR == "2023" ]] && [[ $BEAMTYPE == "PbPb" ]] ; then + if [[ $ALIEN_JDL_LPMANCHORYEAR == "2023" ]] && [[ $BEAMTYPE == "PbPb" ]] ; then + # adding additional cluster errors + # the values below should be squared, but the validation of those values (0.01 and 0.0225) is ongoing + TPCEXTRAERR=";GPU_rec_tpc.clusterError2AdditionalY=0.1;GPU_rec_tpc.clusterError2AdditionalZ=0.15;" if [[ $SCALE_WITH_ZDC == 1 ]]; then echo "For 2023 PbPb ZDC inst. 
lumi applying factor 2.414" export TPC_CORR_SCALING+=";TPCCorrMap.lumiInstFactor=2.414;" @@ -413,9 +416,6 @@ elif [[ $ALIGNLEVEL == 1 ]]; then fi -# adding additional cluster errors -# the values below should be squared, but the validation of those values (0.01 and 0.0225) is ongoing -#TPCEXTRAERR=";GPU_rec_tpc.clusterError2AdditionalY=0.1;GPU_rec_tpc.clusterError2AdditionalZ=0.15;" TRACKTUNETPC=${TPCEXTRAERR-} # combining parameters From d71860acc608c1ecbe07d0780f75502469c21e5f Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 13 Feb 2024 15:02:21 +0100 Subject: [PATCH 033/101] [RelVal] Fix index (#1456) --- RelVal/utils/o2dpg_release_validation_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RelVal/utils/o2dpg_release_validation_utils.py b/RelVal/utils/o2dpg_release_validation_utils.py index ca1d786f5..0482569aa 100755 --- a/RelVal/utils/o2dpg_release_validation_utils.py +++ b/RelVal/utils/o2dpg_release_validation_utils.py @@ -400,7 +400,7 @@ def load(self, summaries_to_test): idx, metric = self.get_metric_checking_dict(line) if idx is None: # in this case, this metric is new - idx = len(self.metrics) - 1 + idx = len(self.metrics) if not self.add_metric(metric): # only attempt to add if that metric is not yet there continue From 45852470c738a1cf04f827371349f7a9a57ee8f1 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 13 Feb 2024 16:04:17 +0100 Subject: [PATCH 034/101] [SimWF] Allow any detectors (#1441) * always run all digi stages independent of which detectors produce hits * run all reco and match stages on top of that * update TOF match sources * add TRD and HMP sources * introduces implicit modularisation * empty digit and reco files are always produced for missing detector hit files --> technicannly allows for any detector composition --> in particular crucial for anchored simulations --> enables the simulation of single detectors for specific expert studies when other material budget is not needed or has to be avoided even * do not hard-code ALIEN_JDL_WORKFLOWDETECTORS but actually allow to take it from the outside JDL in anchorMC.sh Co-authored-by: Benedikt Volkel --- MC/bin/o2dpg_sim_workflow.py | 213 ++++++++++------------------ MC/bin/o2dpg_workflow_utils.py | 6 +- MC/run/ANCHOR/anchorMC.sh | 5 +- UTILS/parse-async-WorkflowConfig.py | 7 + 4 files changed, 89 insertions(+), 142 deletions(-) diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index aaf9f640a..15f54c3bd 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -34,7 +34,7 @@ sys.path.append(join(dirname(__file__), '.', 'o2dpg_workflow_utils')) -from o2dpg_workflow_utils import createTask, createGlobalInitTask, dump_workflow, adjust_RECO_environment, isActive, activate_detector +from o2dpg_workflow_utils import createTask, createGlobalInitTask, dump_workflow, adjust_RECO_environment, isActive, activate_detector, deactivate_detector from o2dpg_qc_finalization_workflow import include_all_QC_finalization from o2dpg_sim_config import create_sim_config, create_geant_config, constructConfigKeyArg @@ -194,14 +194,36 @@ def load_external_config(configfile): print(f"INFO: Written additional config key parameters to JSON {config_key_param_path}") json.dump(anchorConfig, f, indent=2) +def get_anchor_env_var(key, default): + return anchorConfig.get('env_vars',{}).get(key, default) + # with this we can tailor the workflow to the presence of # certain detectors +# these are all detectors that should be assumed active +readout_detectors = 
args.readoutDets +# here are all detectors that have been set in an anchored script activeDetectors = anchorConfig.get('o2-ctf-reader-workflow-options',{}).get('onlyDet','all') +if activeDetectors == 'all': + # if "all" here, there was in fact nothing in the anchored script, set to what is passed to this script (which it either also "all" or a subset) + activeDetectors = readout_detectors +elif readout_detectors != 'all' and activeDetectors != 'all': + # in this case both are comma-seperated lists. Take intersection + r = set(readout_detectors.split(',')) + a = set(activeDetectors.split(',')) + activeDetectors = ','.join(r & a) +# the last case: simply take what comes from the anchored config + # convert to set/hashmap -activeDetectors = { det:1 for det in activeDetectors.split(",") } +activeDetectors = { det:1 for det in activeDetectors.split(',') } for det in activeDetectors: activate_detector(det) +if not args.with_ZDC: + # deactivate to be able to use isActive consistently for ZDC + deactivate_detector('ZDC') + if 'ZDC' in activeDetectors: + del activeDetectors['ZDC'] + def addWhenActive(detID, needslist, appendstring): if isActive(detID): needslist.append(appendstring) @@ -291,7 +313,7 @@ def extractVertexArgs(configKeyValuesStr, finalDiamondDict): NTIMEFRAMES=int(args.tf) NWORKERS=args.j -MODULES = "--skipModules ZDC" if not args.with_ZDC else "" +MODULES = "--skipModules ZDC" if not isActive("ZDC") else "" SIMENGINE=args.e BFIELD=args.field RNDSEED=args.seed # typically the argument should be the jobid, but if we get None the current time is used for the initialisation @@ -444,7 +466,7 @@ def getDPL_global_options(bigshm=False, ccdbbackend=True): + ('',' --timestamp ' + str(args.timestamp))[args.timestamp!=-1] + ' --run ' + str(args.run) \ + ' --vertexMode kCCDB' - if not "all" in activeDetectors: + if not isActive('all'): BKGtask['cmd'] += ' --readoutDetectors ' + " ".join(activeDetectors) workflow['stages'].append(BKGtask) @@ -475,9 +497,7 @@ def getDPL_global_options(bigshm=False, ccdbbackend=True): workflow['stages'].append(BKG_HEADER_task) # a list of smaller sensors (used to construct digitization tasks in a parametrized way) -smallsensorlist = [ "ITS", "TOF", "FDD", "MCH", "MID", "MFT", "HMP", "PHS", "CPV" ] -if args.with_ZDC: - smallsensorlist += [ "ZDC" ] +smallsensorlist = [ "ITS", "TOF", "FDD", "MCH", "MID", "MFT", "HMP", "PHS", "CPV", "ZDC" ] # a list of detectors that serve as input for the trigger processor CTP --> these need to be processed together for now ctp_trigger_inputlist = [ "FT0", "FV0", "EMC" ] @@ -693,7 +713,7 @@ def getDPL_global_options(bigshm=False, ccdbbackend=True): + ' -o ' + signalprefix + ' ' + embeddinto \ + ('', ' --timestamp ' + str(args.timestamp))[args.timestamp!=-1] + ' --run ' + str(args.run) \ + ' --vertexMode kCCDB' - if not "all" in activeDetectors: + if not isActive('all'): SGNtask['cmd'] += ' --readoutDetectors ' + " ".join(activeDetectors) if args.pregenCollContext == True: SGNtask['cmd'] += ' --fromCollContext collisioncontext.root' @@ -839,9 +859,9 @@ def putConfigValuesNew(listOfMainKeys=[], localCF = {}): TPCDigitask=createTask(name='tpcdigi_'+str(tf), needs=tpcdigineeds, tf=tf, cwd=timeframeworkdir, lab=["DIGI"], cpu=NWORKERS, mem=str(tpcdigimem)) TPCDigitask['cmd'] = ('','ln -nfs ../bkg_HitsTPC.root . 
;')[doembedding] - TPCDigitask['cmd'] += '${O2_ROOT}/bin/o2-sim-digitizer-workflow ' + getDPL_global_options() + ' -n ' + str(args.ns) + simsoption \ - + ' --onlyDet TPC --TPCuseCCDB --interactionRate ' + str(INTRATE) + ' --tpc-lanes ' + str(NWORKERS) \ - + ' --incontext ' + str(CONTEXTFILE) + ' --disable-write-ini --early-forward-policy always ' \ + TPCDigitask['cmd'] += '${O2_ROOT}/bin/o2-sim-digitizer-workflow ' + getDPL_global_options() + ' -n ' + str(args.ns) + simsoption \ + + ' --onlyDet TPC --TPCuseCCDB --interactionRate ' + str(INTRATE) + ' --tpc-lanes ' + str(NWORKERS) \ + + ' --incontext ' + str(CONTEXTFILE) + ' --disable-write-ini --early-forward-policy always --forceSelectedDets ' \ + putConfigValuesNew(["TPCGasParam","TPCGEMParam","TPCEleParam","TPCITCorr","TPCDetParam"], localCF={"DigiParams.maxOrbitsToDigitize" : str(orbitsPerTF), "DigiParams.seed" : str(TFSEED)}) TPCDigitask['cmd'] += (' --tpc-chunked-writer','')[args.no_tpc_digitchunking] @@ -859,10 +879,9 @@ def putConfigValuesNew(listOfMainKeys=[], localCF = {}): TRDDigitask['cmd'] = ('','ln -nfs ../bkg_HitsTRD.root . ;')[doembedding] TRDDigitask['cmd'] += '${O2_ROOT}/bin/o2-sim-digitizer-workflow ' + getDPL_global_options() + ' -n ' + str(args.ns) + simsoption \ + ' --onlyDet TRD --interactionRate ' + str(INTRATE) + ' --incontext ' + str(CONTEXTFILE) + ' --disable-write-ini' \ - + putConfigValuesNew(localCF={"TRDSimParams.digithreads" : NWORKERS, "DigiParams.seed" : str(TFSEED)}) + + putConfigValuesNew(localCF={"TRDSimParams.digithreads" : NWORKERS, "DigiParams.seed" : str(TFSEED)}) + " --forceSelectedDets" TRDDigitask['cmd'] += ('',' --disable-mc')[args.no_mc_labels] - if isActive("TRD"): - workflow['stages'].append(TRDDigitask) + workflow['stages'].append(TRDDigitask) # these are digitizers which are single threaded def createRestDigiTask(name, det='ALLSMALLER'): @@ -882,31 +901,25 @@ def createRestDigiTask(name, det='ALLSMALLER'): tf=tf, cwd=timeframeworkdir, lab=["DIGI","SMALLDIGI"], cpu='1') t['cmd'] = ('','ln -nfs ../bkg_Hits*.root . ;')[doembedding] detlist = '' - for d in smallsensorlist: - if isActive(d): - if len(detlist) > 0: - detlist += ',' - detlist += d + detlist = ','.join(smallsensorlist) t['cmd'] += commondigicmd + ' --onlyDet ' + detlist - t['cmd'] += ' --ccdb-tof-sa ' + t['cmd'] += ' --ccdb-tof-sa --forceSelectedDets ' t['cmd'] += (' --combine-devices ','')[args.no_combine_dpl_devices] t['cmd'] += ('',' --disable-mc')[args.no_mc_labels] workflow['stages'].append(t) return t else: # here we create individual digitizers - if isActive(det): - if usebkgcache: - tneeds += [ BKG_HITDOWNLOADER_TASKS[det]['name'] ] - t = createTask(name=name, needs=tneeds, - tf=tf, cwd=timeframeworkdir, lab=["DIGI","SMALLDIGI"], cpu='1') - t['cmd'] = ('','ln -nfs ../bkg_Hits' + str(det) + '.root . ;')[doembedding] - t['cmd'] += commondigicmd + ' --onlyDet ' + str(det) - t['cmd'] += ('',' --disable-mc')[args.no_mc_labels] - if det == 'TOF': - t['cmd'] += ' --ccdb-tof-sa' - workflow['stages'].append(t) - return t + if usebkgcache: + tneeds += [ BKG_HITDOWNLOADER_TASKS[det]['name'] ] + t = createTask(name=name, needs=tneeds, tf=tf, cwd=timeframeworkdir, lab=["DIGI","SMALLDIGI"], cpu='1') + t['cmd'] = ('','ln -nfs ../bkg_Hits' + str(det) + '.root . 
;')[doembedding] + t['cmd'] += commondigicmd + ' --onlyDet ' + str(det) + t['cmd'] += ('',' --disable-mc')[args.no_mc_labels] + if det == 'TOF': + t['cmd'] += ' --ccdb-tof-sa' + workflow['stages'].append(t) + return t det_to_digitask={} @@ -923,18 +936,19 @@ def createRestDigiTask(name, det='ALLSMALLER'): tneeds = [ContextTask['name']] if includeQED: tneeds += [QED_task['name']] - t = createTask(name="ft0fv0emcctp_digi_" + str(tf), needs=tneeds, + FT0FV0EMCCTPDIGItask = createTask(name="ft0fv0emcctp_digi_" + str(tf), needs=tneeds, tf=tf, cwd=timeframeworkdir, lab=["DIGI","SMALLDIGI"], cpu='1') - t['cmd'] = ('','ln -nfs ../bkg_HitsFT0.root . ; ln -nfs ../bkg_HitsFV0.root . ;')[doembedding] - t['cmd'] += '${O2_ROOT}/bin/o2-sim-digitizer-workflow ' + getDPL_global_options() + ' -n ' + str(args.ns) + simsoption \ + FT0FV0EMCCTPDIGItask['cmd'] = ('','ln -nfs ../bkg_HitsFT0.root . ; ln -nfs ../bkg_HitsFV0.root . ;')[doembedding] + FT0FV0EMCCTPDIGItask['cmd'] += '${O2_ROOT}/bin/o2-sim-digitizer-workflow ' + getDPL_global_options() + ' -n ' + str(args.ns) + simsoption \ + ' --onlyDet FT0,FV0,EMC,CTP --interactionRate ' + str(INTRATE) + ' --incontext ' + str(CONTEXTFILE) \ + ' --disable-write-ini' + putConfigValuesNew(localCF={"DigiParams.seed" : str(TFSEED)}) \ - + (' --combine-devices','')[args.no_combine_dpl_devices] + ('',' --disable-mc')[args.no_mc_labels] + QEDdigiargs - workflow['stages'].append(t) - det_to_digitask["FT0"]=t - det_to_digitask["FV0"]=t - det_to_digitask["EMC"]=t - det_to_digitask["CTP"]=t + + (' --combine-devices','')[args.no_combine_dpl_devices] + ('',' --disable-mc')[args.no_mc_labels] + QEDdigiargs \ + + ' --forceSelectedDets' + workflow['stages'].append(FT0FV0EMCCTPDIGItask) + det_to_digitask["FT0"]=FT0FV0EMCCTPDIGItask + det_to_digitask["FV0"]=FT0FV0EMCCTPDIGItask + det_to_digitask["EMC"]=FT0FV0EMCCTPDIGItask + det_to_digitask["CTP"]=FT0FV0EMCCTPDIGItask def getDigiTaskName(det): t = det_to_digitask.get(det) @@ -1007,6 +1021,7 @@ def getDigiTaskName(det): # FIXME This is so far a workaround to avoud a race condition for trdcalibratedtracklets.root TRDTRACKINGtask2 = createTask(name='trdreco2_'+str(tf), needs=[TRDTRACKINGtask['name']], tf=tf, cwd=timeframeworkdir, lab=["RECO"], cpu='1', mem='2000') + trd_track_sources = anchorConfig.get('o2-trd-global-tracking-options', {}).get('track-sources', 'TPC,ITS-TPC') TRDTRACKINGtask2['cmd'] = '${O2_ROOT}/bin/o2-trd-global-tracking ' + getDPL_global_options(bigshm=True) + ('',' --disable-mc')[args.no_mc_labels] \ + putConfigValuesNew(['ITSClustererParam', 'ITSCATrackerParam', @@ -1015,7 +1030,7 @@ def getDigiTaskName(det): 'ft0tag', 'TPCGasParam', 'TPCCorrMap'], {"NameConf.mDirMatLUT" : ".."}) \ - + " --track-sources " + anchorConfig.get("o2-trd-global-tracking-options",{}).get("track-sources","all") \ + + " --track-sources " + trd_track_sources \ + tpc_corr_scaling_options workflow['stages'].append(TRDTRACKINGtask2) @@ -1023,11 +1038,9 @@ def getDigiTaskName(det): TOFRECOtask['cmd'] = '${O2_ROOT}/bin/o2-tof-reco-workflow --use-ccdb ' + getDPL_global_options() + putConfigValuesNew() + ('',' --disable-mc')[args.no_mc_labels] workflow['stages'].append(TOFRECOtask) - toftpcmatchneeds = [TOFRECOtask['name'], TPCRECOtask['name']] - toftracksrcdefault = "TPC,ITS-TPC" - if isActive('TRD'): - toftpcmatchneeds.append(TRDTRACKINGtask2['name']) - toftracksrcdefault+=",TPC-TRD,ITS-TPC-TRD" + + toftpcmatchneeds = [TOFRECOtask['name'], TPCRECOtask['name'], ITSTPCMATCHtask['name'], TRDTRACKINGtask2['name']] + toftracksrcdefault = 
anchorConfig.get('o2-tof-matcher-workflow-options', {}).get('track-sources', 'TPC,ITS-TPC,TPC-TRD,ITS-TPC-TRD') TOFTPCMATCHERtask = createTask(name='toftpcmatch_'+str(tf), needs=toftpcmatchneeds, tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='1000') TOFTPCMATCHERtask['cmd'] = '${O2_ROOT}/bin/o2-tof-matcher-workflow ' + getDPL_global_options() \ + putConfigValuesNew(["ITSClustererParam", @@ -1037,7 +1050,7 @@ def getDigiTaskName(det): 'MFTClustererParam', 'GPU_rec_tpc', 'trackTuneParams']) \ - + " --track-sources " + anchorConfig.get("o2-tof-matcher-workflow-options",{}).get("track-sources",toftracksrcdefault) + (' --combine-devices','')[args.no_combine_dpl_devices] \ + + " --track-sources " + toftracksrcdefault + (' --combine-devices','')[args.no_combine_dpl_devices] \ + tpc_corr_scaling_options workflow['stages'].append(TOFTPCMATCHERtask) @@ -1100,11 +1113,10 @@ def getDigiTaskName(det): CPVRECOtask['cmd'] += ('',' --disable-mc')[args.no_mc_labels] workflow['stages'].append(CPVRECOtask) - if args.with_ZDC: - ZDCRECOtask = createTask(name='zdcreco_'+str(tf), needs=[getDigiTaskName("ZDC")], tf=tf, cwd=timeframeworkdir, lab=["RECO", "ZDC"]) - ZDCRECOtask['cmd'] = '${O2_ROOT}/bin/o2-zdc-digits-reco ' + getDPL_global_options() + putConfigValues() - ZDCRECOtask['cmd'] += ('',' --disable-mc')[args.no_mc_labels] - workflow['stages'].append(ZDCRECOtask) + ZDCRECOtask = createTask(name='zdcreco_'+str(tf), needs=[getDigiTaskName("ZDC")], tf=tf, cwd=timeframeworkdir, lab=["RECO", "ZDC"]) + ZDCRECOtask['cmd'] = '${O2_ROOT}/bin/o2-zdc-digits-reco ' + getDPL_global_options() + putConfigValues() + ZDCRECOtask['cmd'] += ('',' --disable-mc')[args.no_mc_labels] + workflow['stages'].append(ZDCRECOtask) ## forward matching MCHMIDMATCHtask = createTask(name='mchmidMatch_'+str(tf), needs=[MCHRECOtask['name'], MIDRECOtask['name']], tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='1500') @@ -1130,56 +1142,16 @@ def getDigiTaskName(det): HMPRECOtask['cmd'] = '${O2_ROOT}/bin/o2-hmpid-digits-to-clusters-workflow ' + getDPL_global_options(ccdbbackend=False) + putConfigValuesNew() workflow['stages'].append(HMPRECOtask) - HMPMATCHtask = createTask(name='hmpmatch_'+str(tf), needs=[HMPRECOtask['name'],ITSTPCMATCHtask['name'],TOFTPCMATCHERtask['name']], tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='1000') - HMPMATCHtask['cmd'] = '${O2_ROOT}/bin/o2-hmpid-matcher-workflow ' + getDPL_global_options() + putConfigValuesNew() + hmpmatchneeds = [HMPRECOtask['name'], ITSTPCMATCHtask['name'], TOFTPCMATCHERtask['name'], TRDTRACKINGtask2['name']] + hmp_match_sources = anchorConfig.get('o2-hmpid-matcher-workflow', {}).get('track-sources', 'ITS-TPC,ITS-TPC-TRD,TPC-TRD') + HMPMATCHtask = createTask(name='hmpmatch_'+str(tf), needs=hmpmatchneeds, tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='1000') + HMPMATCHtask['cmd'] = '${O2_ROOT}/bin/o2-hmpid-matcher-workflow --track-sources ' + hmp_match_sources + getDPL_global_options() + putConfigValuesNew() workflow['stages'].append(HMPMATCHtask) # Take None as default, we only add more if nothing from anchorConfig - pvfinder_sources = anchorConfig.get("o2-primary-vertexing-workflow-options",{}).get("vertexing-sources", None) - pvfinder_matching_sources = anchorConfig.get("o2-primary-vertexing-workflow-options",{}).get("vertex-track-matching-sources", None) - pvfinderneeds = [ITSTPCMATCHtask['name']] - if not pvfinder_sources: - pvfinder_sources = "ITS,ITS-TPC,ITS-TPC-TRD,ITS-TPC-TOF,ITS-TPC-TRD-TOF" - if not pvfinder_matching_sources: - pvfinder_matching_sources = 
"ITS,MFT,TPC,ITS-TPC,MCH,MFT-MCH,TPC-TOF,TPC-TRD,ITS-TPC-TRD,ITS-TPC-TOF,ITS-TPC-TRD-TOF" - if isActive("MID"): - pvfinder_matching_sources += ",MID" - pvfinderneeds += [MIDRECOtask['name']] - if isActive('MCH') and isActive('MID'): - pvfinder_matching_sources += ",MCH-MID" - pvfinderneeds += [MCHMIDMATCHtask['name']] - - if isActive('FT0'): - pvfinderneeds += [FT0RECOtask['name']] - pvfinder_matching_sources += ",FT0" - if isActive('FV0'): - pvfinderneeds += [FV0RECOtask['name']] - pvfinder_matching_sources += ",FV0" - if isActive('FDD'): - pvfinderneeds += [FT0RECOtask['name']] - pvfinder_matching_sources += ",FDD" - if isActive('EMC'): - pvfinderneeds += [EMCRECOtask['name']] - pvfinder_matching_sources += ",EMC" - if isActive('PHS'): - pvfinderneeds += [PHSRECOtask['name']] - pvfinder_matching_sources += ",PHS" - if isActive('CPV'): - pvfinderneeds += [CPVRECOtask['name']] - pvfinder_matching_sources += ",CPV" - if isActive('TOF'): - pvfinderneeds += [TOFTPCMATCHERtask['name']] - if isActive('MFT'): - pvfinderneeds += [MFTRECOtask['name']] - if isActive('MCH'): - pvfinderneeds += [MCHRECOtask['name']] - if isActive('TRD'): - pvfinderneeds += [TRDTRACKINGtask2['name']] - if isActive('FDD'): - pvfinderneeds += [FDDRECOtask['name']] - if isActive('MFT') and isActive('MCH'): - pvfinderneeds += [MFTMCHMATCHtask['name']] - + pvfinder_sources = anchorConfig.get('o2-primary-vertexing-workflow-options', {}).get('vertexing-sources', 'ITS-TPC,TPC-TRD,ITS-TPC-TRD,TPC-TOF,ITS-TPC-TOF,TPC-TRD-TOF,ITS-TPC-TRD-TOF,MFT-MCH,MCH-MID,ITS,MFT,TPC,TOF,FT0,MID,EMC,PHS,CPV,FDD,HMP,FV0,TRD,MCH,CTP') + pvfinder_matching_sources = anchorConfig.get('', {}).get('vertex-track-matching-sources', 'ITS-TPC,TPC-TRD,ITS-TPC-TRD,TPC-TOF,ITS-TPC-TOF,TPC-TRD-TOF,ITS-TPC-TRD-TOF,MFT-MCH,MCH-MID,ITS,MFT,TPC,TOF,FT0,MID,EMC,PHS,CPV,ZDC,FDD,HMP,FV0,TRD,MCH,CTP') + pvfinderneeds = [TRDTRACKINGtask2['name'], FT0RECOtask['name'], FV0RECOtask['name'], EMCRECOtask['name'], PHSRECOtask['name'], CPVRECOtask['name'], FDDRECOtask['name'], ZDCRECOtask['name'], HMPMATCHtask['name'], HMPMATCHtask['name'], ITSTPCMATCHtask['name'], TOFTPCMATCHERtask['name'], MFTMCHMATCHtask['name'], MCHMIDMATCHtask['name']] PVFINDERtask = createTask(name='pvfinder_'+str(tf), needs=pvfinderneeds, tf=tf, cwd=timeframeworkdir, lab=["RECO"], cpu=NWORKERS, mem='4000') PVFINDERtask['cmd'] = '${O2_ROOT}/bin/o2-primary-vertexing-workflow ' \ @@ -1337,15 +1309,12 @@ def addQCPerTF(taskName, needs, readerCommand, configFilePath, objectsFile=''): if COLTYPE == "PbPb" or (doembedding and COLTYPEBKG == "PbPb"): svfinder_threads = ' --threads 8 ' svfinder_cpu = 8 + SVFINDERtask = createTask(name='svfinder_'+str(tf), needs=[PVFINDERtask['name'], FT0FV0EMCCTPDIGItask['name']], tf=tf, cwd=timeframeworkdir, lab=["RECO"], cpu=svfinder_cpu, mem='5000') SVFINDERtask = createTask(name='svfinder_'+str(tf), needs=[PVFINDERtask['name']], tf=tf, cwd=timeframeworkdir, lab=["RECO"], cpu=svfinder_cpu, mem='5000') SVFINDERtask['cmd'] = '${O2_ROOT}/bin/o2-secondary-vertexing-workflow ' SVFINDERtask['cmd'] += getDPL_global_options(bigshm=True) + svfinder_threads + putConfigValuesNew(['svertexer', 'TPCCorrMap'], {"NameConf.mDirMatLUT" : ".."}) # Take None as default, we only add more if nothing from anchorConfig - svfinder_sources = anchorConfig.get("o2-secondary-vertexing-workflow-options",{}).get("vertexing-sources", None) - if not svfinder_sources: - svfinder_sources = "ITS,TPC,ITS-TPC,TPC-TRD,TPC-TOF,ITS-TPC-TRD,ITS-TPC-TOF,ITS-TPC-TRD-TOF" - if isActive("MID"): - svfinder_sources += 
",MID" + svfinder_sources = anchorConfig.get('o2-secondary-vertexing-workflow-options', {}). get('vertexing-sources', 'ITS-TPC,TPC-TRD,ITS-TPC-TRD,TPC-TOF,ITS-TPC-TOF,TPC-TRD-TOF,ITS-TPC-TRD-TOF,MFT-MCH,MCH-MID,ITS,MFT,TPC,TOF,FT0,MID,EMC,PHS,CPV,ZDC,FDD,HMP,FV0,TRD,MCH,CTP') SVFINDERtask['cmd'] += ' --vertexing-sources ' + svfinder_sources + (' --combine-source-devices','')[args.no_combine_dpl_devices] # strangeness tracking is now called from the secondary vertexer if not args.with_strangeness_tracking: @@ -1359,39 +1328,9 @@ def addQCPerTF(taskName, needs, readerCommand, configFilePath, objectsFile=''): # produce AOD # ----------- # TODO This needs further refinement, sources and dependencies should be constructed dynamically - aodinfosources = 'ITS,MFT,MCH,TPC,ITS-TPC,MFT-MCH,ITS-TPC-TOF,TPC-TOF,FT0,FDD,TPC-TRD,ITS-TPC-TRD,ITS-TPC-TRD-TOF' + aodinfosources = anchorConfig.get('o2-aod-producer-workflow-options', {}).get('info-sources', 'ITS-TPC,TPC-TRD,ITS-TPC-TRD,TPC-TOF,ITS-TPC-TOF,TPC-TRD-TOF,ITS-TPC-TRD-TOF,MFT-MCH,MCH-MID,ITS,MFT,TPC,TOF,FT0,MID,EMC,PHS,CPV,ZDC,FDD,HMP,FV0,TRD,MCH,CTP') aodneeds = [PVFINDERtask['name'], SVFINDERtask['name']] - if isActive('CTP'): - aodinfosources += ',CTP' - if isActive('FV0'): - aodneeds += [ FV0RECOtask['name'] ] - aodinfosources += ',FV0' - if isActive('TOF'): - aodneeds += [ TOFRECOtask['name'] ] - if isActive('TRD'): - aodneeds += [ TRDTRACKINGtask2['name'] ] - if isActive('EMC'): - aodneeds += [ EMCRECOtask['name'] ] - aodinfosources += ',EMC' - if isActive('CPV'): - aodneeds += [ CPVRECOtask['name'] ] - aodinfosources += ',CPV' - if isActive('PHS'): - aodneeds += [ PHSRECOtask['name'] ] - aodinfosources += ',PHS' - if isActive('MID'): - aodneeds += [ MIDRECOtask['name'] ] - aodinfosources += ',MID' - if isActive('MID') and isActive('MCH'): - aodneeds += [ MCHMIDMATCHtask['name'] ] - aodinfosources += ',MCH-MID' - if isActive('HMP'): - aodneeds += [ HMPMATCHtask['name'] ] - aodinfosources += ',HMP' - if args.with_ZDC and isActive('ZDC'): - aodneeds += [ ZDCRECOtask['name'] ] - aodinfosources += ',ZDC' if usebkgcache: aodneeds += [ BKG_KINEDOWNLOADER_TASK['name'] ] @@ -1407,7 +1346,7 @@ def addQCPerTF(taskName, needs, readerCommand, configFilePath, objectsFile=''): if args.run_anchored == False: AODtask['cmd'] += ' --aod-timeframe-id ${ALIEN_PROC_ID}' + aod_df_id AODtask['cmd'] += ' ' + getDPL_global_options(bigshm=True) - AODtask['cmd'] += ' --info-sources ' + anchorConfig.get("o2-aod-producer-workflow-options",{}).get("info-sources",str(aodinfosources)) + AODtask['cmd'] += ' --info-sources ' + aodinfosources AODtask['cmd'] += ' --lpmp-prod-tag ${ALIEN_JDL_LPMPRODUCTIONTAG:-unknown}' AODtask['cmd'] += ' --anchor-pass ${ALIEN_JDL_LPMANCHORPASSNAME:-unknown}' AODtask['cmd'] += ' --anchor-prod ${ALIEN_JDL_MCANCHOR:-unknown}' diff --git a/MC/bin/o2dpg_workflow_utils.py b/MC/bin/o2dpg_workflow_utils.py index a029e8dee..748129de2 100755 --- a/MC/bin/o2dpg_workflow_utils.py +++ b/MC/bin/o2dpg_workflow_utils.py @@ -6,6 +6,7 @@ # List of active detectors ACTIVE_DETECTORS = ["all"] +INACTIVE_DETECTORS = [] def activate_detector(det): try: @@ -16,8 +17,11 @@ def activate_detector(det): pass ACTIVE_DETECTORS.append(det) +def deactivate_detector(det): + INACTIVE_DETECTORS.append(det) + def isActive(det): - return "all" in ACTIVE_DETECTORS or det in ACTIVE_DETECTORS + return det not in INACTIVE_DETECTORS and ("all" in ACTIVE_DETECTORS or det in ACTIVE_DETECTORS) def relativeCPU(n_rel, n_workers): # compute number of CPUs from a given number of workers diff 
--git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index d0beecc0a..b5d0d80fb 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -79,6 +79,7 @@ done # the only two where there is a real default for export ALIEN_JDL_CPULIMIT=${ALIEN_JDL_CPULIMIT:-${CPULIMIT:-8}} export ALIEN_JDL_SIMENGINE=${ALIEN_JDL_SIMENGINE:-${SIMENGINE:-TGeant4}} +export ALIEN_JDL_WORKFLOWDETECTORS=${ALIEN_JDL_WORKFLOWDETECTORS:-ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP} # all others MUST be set by the user/on the outside export ALIEN_JDL_LPMANCHORPASSNAME=${ALIEN_JDL_LPMANCHORPASSNAME:-${ANCHORPASSNAME}} export ALIEN_JDL_MCANCHOR=${ALIEN_JDL_MCANCHOR:-${MCANCHOR}} @@ -96,10 +97,6 @@ ALIEN_JDL_LPMPRODUCTIONTAG_KEEP=$ALIEN_JDL_LPMPRODUCTIONTAG echo "Substituting ALIEN_JDL_LPMPRODUCTIONTAG=$ALIEN_JDL_LPMPRODUCTIONTAG with ALIEN_JDL_LPMANCHORPRODUCTION=$ALIEN_JDL_LPMANCHORPRODUCTION for simulating reco pass..." ALIEN_JDL_LPMPRODUCTIONTAG=$ALIEN_JDL_LPMANCHORPRODUCTION -# ZDC causes issues for sim -#export ALIEN_JDL_WORKFLOWDETECTORS=ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,ZDC,CTP -export ALIEN_JDL_WORKFLOWDETECTORS=ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP - # check variables that need to be set [ -z "${ALIEN_JDL_LPMANCHORPASSNAME}" ] && { echo "ERROR: Set ALIEN_JDL_LPMANCHORPASSNAME or ANCHORPASSNAME" ; exit 1 ; } [ -z "${ALIEN_JDL_MCANCHOR}" ] && { echo "ERROR: Set ALIEN_JDL_MCANCHOR or MCANCHOR" ; exit 1 ; } diff --git a/UTILS/parse-async-WorkflowConfig.py b/UTILS/parse-async-WorkflowConfig.py index 6252e6c65..15e713e8b 100755 --- a/UTILS/parse-async-WorkflowConfig.py +++ b/UTILS/parse-async-WorkflowConfig.py @@ -195,6 +195,13 @@ def parse_important_DPL_args(cmds, flat_config): corrstring += ' --corrma-lumi-mode ' + s2 # these are some options applied in multiple places (so save them flatly under tpc-corr-scaling) flat_config['tpc-corr-scaling'] = corrstring + + # hmp matching + if cmd == 'o2-hmpid-matcher-workflow': + c = {} + c['track-sources'] = extract_args(tokens, '--track-sources') + flat_config['o2-hmpid-matcher-workflow'] = c + def print_untreated_args(cmds): """ From 62782db57c1b0718d4f44453ab48b3f9899c9acb Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Wed, 14 Feb 2024 09:21:03 +0100 Subject: [PATCH 035/101] [RelVal] Add specific QC objects to compare (#1458) * put files with include patterns at RelVal/config/QC/async/_include_patterns_default.txt * add pattern files for ITS, TPC, TOF * allow comments starting with "#" Co-authored-by: Benedikt Volkel --- .../QC/async/ITS_include_patterns_default.txt | 15 +++++++++++++++ .../QC/async/TOF_include_patterns_defautl.txt | 11 +++++++++++ .../QC/async/TPC_include_patterns_default.txt | 13 +++++++++++++ RelVal/utils/o2dpg_release_validation_utils.py | 8 +++++++- 4 files changed, 46 insertions(+), 1 deletion(-) create mode 100644 RelVal/config/QC/async/ITS_include_patterns_default.txt create mode 100644 RelVal/config/QC/async/TOF_include_patterns_defautl.txt create mode 100644 RelVal/config/QC/async/TPC_include_patterns_default.txt diff --git a/RelVal/config/QC/async/ITS_include_patterns_default.txt b/RelVal/config/QC/async/ITS_include_patterns_default.txt new file mode 100644 index 000000000..e63098e87 --- /dev/null +++ b/RelVal/config/QC/async/ITS_include_patterns_default.txt @@ -0,0 +1,15 @@ +# for data and MC +ITS_Tracks_PhiDistribution$ +ITS_Tracks_NClusters$ +ITS_Tracks_VertexZ$ +ITS_Tracks_VertexRvsZ$ +ITS_Tracks_VertexCoordinates$ 
+ITS_Clusters_Layer0_AverageClusterSizeSummary$ +ITS_Clusters_Layer3_AverageClusterSizeSummary$ +# only MC +ITS_TracksMc_efficiency_eta_ratioFromTEfficiency$ +ITS_TracksMc_efficiency_phi_ratioFromTEfficiency$ +ITS_TracksMc_efficiency_pt_ratioFromTEfficiency$ +ITS_TracksMc_faketrack_eta_ratioFromTEfficiency$ +ITS_TracksMc_faketrack_phi_ratioFromTEfficiency$ +ITS_TracksMc_faketrack_pt_ratioFromTEfficiency$ diff --git a/RelVal/config/QC/async/TOF_include_patterns_defautl.txt b/RelVal/config/QC/async/TOF_include_patterns_defautl.txt new file mode 100644 index 000000000..2a9204335 --- /dev/null +++ b/RelVal/config/QC/async/TOF_include_patterns_defautl.txt @@ -0,0 +1,11 @@ +# note that all "/" have to be replaced by "_" +TOF_Digits_DecodingErrors$ +TOF_Digits_Multiplicity_Integrated$ +TOF_MatchTrAll_mTOFChi2ITSTPC-ITSTPCTRD$ +TOF_MatchTrAll_mTOFChi2TPC$ +TOF_MatchTrAll_mTOFChi2TPCTRD$ +TOF_MatchTrAll_mEffPt_ITSTPC-ITSTPCTRD_ratioFromTEfficiency$ +TOF_PID_EvTimeTOF$ +TOF_PID_DeltaBCTOFFT0$ +TOF_PID_DeltatPi_Pt_ITSTPC_t0TOF$ +TOF_PID_DeltatPi_Pt_ITSTPCTRD_t0TOF$ diff --git a/RelVal/config/QC/async/TPC_include_patterns_default.txt b/RelVal/config/QC/async/TPC_include_patterns_default.txt new file mode 100644 index 000000000..4472729e5 --- /dev/null +++ b/RelVal/config/QC/async/TPC_include_patterns_default.txt @@ -0,0 +1,13 @@ +# note that all "/" have to be replaced by "_" +TPC_Tracks_hPhiAside$ +TPC_Tracks_hPhiCside$ +TPC_Tracks_hEta$ +TPC_Tracks_hNClustersAfterCuts$ +TPC_Tracks_hQOverPt$ +TPC_Tracks_hDCAr_A_Pos$ +TPC_Tracks_hDCAr_C_Pos$ +TPC_Tracks_hDCArVsEtaPos$ +TPC_Tracks_h2DNClustersPhiAside$ +TPC_Tracks_h2DNClustersPhiCside$ +TPC_PID_hdEdxTotMIP_TPC$ +TPC_PID_hdEdxTotVsP_TPC$ diff --git a/RelVal/utils/o2dpg_release_validation_utils.py b/RelVal/utils/o2dpg_release_validation_utils.py index 0482569aa..2607ab69e 100755 --- a/RelVal/utils/o2dpg_release_validation_utils.py +++ b/RelVal/utils/o2dpg_release_validation_utils.py @@ -282,9 +282,15 @@ def load_this_patterns(patterns): return patterns patterns_from_file = [] - with open(patterns[0][1:], "r") as f: + filename = patterns[0][1:] + if not exists(filename): + print(f"WARNING: Pattern file {filename} does not exist, not extracting any patterns!") + return + with open(filename, "r") as f: for line in f: line = line.strip() + # remove all comments; allows for inline comments or entire comment lines), then take the first token + line = line.split("#")[0].strip() if not line: continue patterns_from_file.append(line) From f751289ebb6644012a7c531ddf12ccd4925ed39c Mon Sep 17 00:00:00 2001 From: jian Date: Mon, 5 Feb 2024 22:55:03 +0100 Subject: [PATCH 036/101] moving MC QC from test ccdb to qcdb --- MC/bin/o2dpg_qc_finalization_workflow.py | 9 +++++---- MC/bin/o2dpg_sim_workflow.py | 5 +++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/MC/bin/o2dpg_qc_finalization_workflow.py b/MC/bin/o2dpg_qc_finalization_workflow.py index 1b2866944..373989f20 100755 --- a/MC/bin/o2dpg_qc_finalization_workflow.py +++ b/MC/bin/o2dpg_qc_finalization_workflow.py @@ -33,7 +33,7 @@ def QC_finalize_name(name): return name + "_finalize" qcdir = "QC" -def include_all_QC_finalization(ntimeframes, standalone, run, productionTag, conditionDB): +def include_all_QC_finalization(ntimeframes, standalone, run, productionTag, conditionDB, qcdbHost): stages = [] @@ -49,7 +49,7 @@ def add_QC_finalization(taskName, qcConfigPath, needs=None): task = createTask(name=QC_finalize_name(taskName), needs=needs, cwd=qcdir, lab=["QC"], cpu=1, mem='2000') task['cmd'] = f'o2-qc 
--config {qcConfigPath} --remote-batch {taskName}.root' + \ - f' --override-values "qc.config.Activity.number={run};qc.config.Activity.periodName={productionTag};qc.config.conditionDB.url={conditionDB}"' + \ + f' --override-values "qc.config.database.host={qcdbHost};qc.config.Activity.number={run};qc.config.Activity.periodName={productionTag};qc.config.conditionDB.url={conditionDB}"' + \ ' ' + getDPL_global_options() stages.append(task) @@ -67,7 +67,7 @@ def add_QC_postprocessing(taskName, qcConfigPath, needs, runSpecific, prodSpecif overrideValues = '--override-values "' overrideValues += f'qc.config.Activity.number={run};' if runSpecific else 'qc.config.Activity.number=0;' overrideValues += f'qc.config.Activity.periodName={productionTag};' if prodSpecific else 'qc.config.Activity.periodName=;' - overrideValues += f'qc.config.conditionDB.url={conditionDB}"' + overrideValues += f'qc.config.database.host={qcdbHost};qc.config.conditionDB.url={conditionDB}"' task['cmd'] = f'o2-qc --config {qcConfigPath} ' + \ overrideValues + ' ' + getDPL_global_options() stages.append(task) @@ -119,6 +119,7 @@ def main() -> int: parser.add_argument('-run',help="Run number for this MC", default=300000) parser.add_argument('-productionTag',help="Production tag for this MC", default='unknown') parser.add_argument('-conditionDB',help="CCDB url for QC workflows", default='http://alice-ccdb.cern.ch') + parser.add_argument('-qcdbHost',help="QCDB url for QC object uploading", default='http://ali-qcdbmc-gpn.cern.ch:8083') args = parser.parse_args() print (args) @@ -140,7 +141,7 @@ def main() -> int: mkdir(qcdir) workflow={} - workflow['stages'] = include_all_QC_finalization(ntimeframes=1, standalone=True, run=args.run, productionTag=args.productionTag, conditionDB=args.conditionDB) + workflow['stages'] = include_all_QC_finalization(ntimeframes=1, standalone=True, run=args.run, productionTag=args.productionTag, conditionDB=args.conditionDB, qcdbHost=args.qcdbHost) dump_workflow(workflow["stages"], args.o) diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index 15f54c3bd..5b236027e 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -49,6 +49,7 @@ # - we can also sample it ourselfs here parser.add_argument('--timestamp', type=int, help="Anchoring timestamp (defaults to now)", default=-1) parser.add_argument('--conditionDB',help="CCDB url for QC workflows", default='http://alice-ccdb.cern.ch') +parser.add_argument('--qcdbHost',help="QCDB url for QC object uploading", default='http://ali-qcdbmc-gpn.cern.ch:8083') parser.add_argument('--condition-not-after', type=int, help="only consider CCDB objects not created after this timestamp (for TimeMachine)", default=3385078236000) parser.add_argument('--orbitsPerTF', type=int, help="Timeframe size in number of LHC orbits", default=128) parser.add_argument('--anchor-config',help="JSON file to contextualise workflow with external configs (config values etc.) 
for instance comping from data reco workflows.", default='') @@ -1168,7 +1169,7 @@ def addQCPerTF(taskName, needs, readerCommand, configFilePath, objectsFile=''): # the --local-batch argument will make QC Tasks store their results in a file and merge with any existing objects task['cmd'] = f'{readerCommand} | o2-qc --config {configFilePath}' + \ f' --local-batch ../{qcdir}/{objectsFile}' + \ - f' --override-values "qc.config.Activity.number={args.run};qc.config.Activity.periodName={args.productionTag};qc.config.Activity.start={args.timestamp};qc.config.conditionDB.url={args.conditionDB}"' + \ + f' --override-values "qc.config.database.host={args.qcdbHost};qc.config.Activity.number={args.run};qc.config.Activity.periodName={args.productionTag};qc.config.Activity.start={args.timestamp};qc.config.conditionDB.url={args.conditionDB}"' + \ ' ' + getDPL_global_options(ccdbbackend=False) # Prevents this task from being run for multiple TimeFrames at the same time, thus trying to modify the same file. task['semaphore'] = objectsFile @@ -1423,7 +1424,7 @@ def addQCPerTF(taskName, needs, readerCommand, configFilePath, objectsFile=''): job_merging = False if includeFullQC: - workflow['stages'].extend(include_all_QC_finalization(ntimeframes=NTIMEFRAMES, standalone=False, run=args.run, productionTag=args.productionTag, conditionDB=args.conditionDB)) + workflow['stages'].extend(include_all_QC_finalization(ntimeframes=NTIMEFRAMES, standalone=False, run=args.run, productionTag=args.productionTag, conditionDB=args.conditionDB, qcdbHost=args.qcdbHost)) if includeAnalysis: From 4171dcffd87c9de94d627b2769e3f38c5602bdbf Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Tue, 13 Feb 2024 15:11:34 +0100 Subject: [PATCH 037/101] JDL vars don't allow spaces or underscore: only 1 word --- DATA/production/configurations/asyncReco/setenv_extra.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index bc65f87fb..d178ca5db 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -321,7 +321,7 @@ elif [[ $ALIGNLEVEL == 1 ]]; then fi DISABLE_CORRECTIONS= - [[ -n "$ALIEN_JDL_MSHAPE_CORRECTION" && $ALIEN_JDL_MSHAPE_CORRECTION == "0" ]] && ENABLE_MSHAPE=0 || ENABLE_MSHAPE=1 + [[ -n "$ALIEN_JDL_MSHAPECORRECTION" && $ALIEN_JDL_MSHAPECORRECTION == "0" ]] && ENABLE_MSHAPE=0 || ENABLE_MSHAPE=1 if [[ -n $MEAN_IR_FOR_TPC ]] ; then # firs check if corrections were not disabled via MEAN_IR_FOR_TPC if [[ $MEAN_IR_FOR_TPC -gt 0 ]] ; then # positive value overrides map mean lumi @@ -352,7 +352,7 @@ elif [[ $ALIGNLEVEL == 1 ]]; then elif [[ $INST_IR_FOR_TPC == "CTPCCDB" ]]; then # using what we have in the CCDB CTP counters, extracted at the beginning of the script echo "Using CTP CCDB which gave the mean IR of the run at the beginning of the script ($RUN_IR Hz)" export TPC_CORR_SCALING+=";TPCCorrMap.lumiInst=$RUN_IR" - else echo "Unknown setting for INST_IR_FOR_TPC = $INST_IR_FOR_TPC (with ALIEN_JDL_INST_IR_FOR_TPC = $ALIEN_JDL_INST_IR_FOR_TPC)" + else echo "Unknown setting for INST_IR_FOR_TPC = $INST_IR_FOR_TPC (with ALIEN_JDL_INSTIRFORTPC = $ALIEN_JDL_INSTIRFORTPC)" return 1 fi From c8c56d7c6551f73672f197e935d55f4ad57f67c7 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Wed, 14 Feb 2024 15:46:29 +0100 Subject: [PATCH 038/101] [RelVal] Fix typo in filename (#1459) Co-authored-by: Benedikt Volkel --- 
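NOTE (illustration only, not part of the committed patches): the RelVal include-pattern files added above are plain-text lists of regular expressions, one per line, and "#" starts a comment. As their header comments state, every "/" of a QC object path has to be replaced by "_" before matching. A minimal shell sketch of that flattening and matching, assuming a hypothetical object path TPC/Tracks/hEta (the pattern used is the line already present in TPC_include_patterns_default.txt):

    # flatten the assumed object path and test it against one pattern line
    echo "TPC/Tracks/hEta" | tr '/' '_' | grep -E 'TPC_Tracks_hEta$'
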
...lude_patterns_defautl.txt => TOF_include_patterns_default.txt} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename RelVal/config/QC/async/{TOF_include_patterns_defautl.txt => TOF_include_patterns_default.txt} (100%) diff --git a/RelVal/config/QC/async/TOF_include_patterns_defautl.txt b/RelVal/config/QC/async/TOF_include_patterns_default.txt similarity index 100% rename from RelVal/config/QC/async/TOF_include_patterns_defautl.txt rename to RelVal/config/QC/async/TOF_include_patterns_default.txt From b7950acf91dd64500ff647717940a1c927e6ce1d Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Wed, 14 Feb 2024 16:51:34 +0100 Subject: [PATCH 039/101] [RelVal] Add MFT default objects, add comment to ITS file (#1461) Co-authored-by: Benedikt Volkel --- RelVal/config/QC/async/ITS_include_patterns_default.txt | 1 + RelVal/config/QC/async/MFT_include_patterns_default.txt | 9 +++++++++ 2 files changed, 10 insertions(+) create mode 100644 RelVal/config/QC/async/MFT_include_patterns_default.txt diff --git a/RelVal/config/QC/async/ITS_include_patterns_default.txt b/RelVal/config/QC/async/ITS_include_patterns_default.txt index e63098e87..f8a391236 100644 --- a/RelVal/config/QC/async/ITS_include_patterns_default.txt +++ b/RelVal/config/QC/async/ITS_include_patterns_default.txt @@ -1,3 +1,4 @@ +# note that all "/" have to be replaced by "_" # for data and MC ITS_Tracks_PhiDistribution$ ITS_Tracks_NClusters$ diff --git a/RelVal/config/QC/async/MFT_include_patterns_default.txt b/RelVal/config/QC/async/MFT_include_patterns_default.txt new file mode 100644 index 000000000..a2b013ea1 --- /dev/null +++ b/RelVal/config/QC/async/MFT_include_patterns_default.txt @@ -0,0 +1,9 @@ +# note that all "/" have to be replaced by "_" +MFT_Tracks_mMFTTrackEta_5_MinClusters$ +MFT_Tracks_mMFTTrackPhi_5_MinClusters$ +MFT_Tracks_mMFTTrackEta_6_MinClusters$ +MFT_Tracks_mMFTTrackPhi_6_MinClusters$ +MFT_Tracks_mMFTTrackInvQPt$ +MFT_Tracks_mMFTTrackNumberOfClusters$ +MFT_Tracks_mMFTTrackROFSize$ +MFT_Clusters_mClustersROFSize$ From 843a10020b04fa8462ee73ecc371d74a97242f82 Mon Sep 17 00:00:00 2001 From: catalinristea Date: Thu, 15 Feb 2024 09:09:12 +0200 Subject: [PATCH 040/101] Update anchorMC.sh - added proc arg (#1462) * Update anchorMC.sh - added proc arg --- MC/run/ANCHOR/anchorMC.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index b5d0d80fb..438a790b8 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -191,7 +191,7 @@ ALICEO2_CCDB_LOCALCACHE=${ALICEO2_CCDB_LOCALCACHE:-$(pwd)/ccdb} baseargs="-tf ${NTIMEFRAMES} --split-id ${SPLITID} --prod-split ${PRODSPLIT} --cycle ${CYCLE} --run-number ${ALIEN_JDL_LPMRUNNUMBER}" # these arguments will be passed as well but only evetually be digested by o2dpg_sim_workflow.py which is called from o2dpg_sim_workflow_anchored.py -remainingargs="-gen pythia8 -seed ${SEED} -ns ${NSIGEVENTS} --include-local-qc --pregenCollContext" +remainingargs="-gen pythia8 -proc inel -seed ${SEED} -ns ${NSIGEVENTS} --include-local-qc --pregenCollContext" remainingargs="${remainingargs} -e ${ALIEN_JDL_SIMENGINE} -j ${NWORKERS}" remainingargs="${remainingargs} -productionTag ${ALIEN_JDL_LPMPRODUCTIONTAG:-alibi_anchorTest_tmp}" remainingargs="${remainingargs} --anchor-config config-json.json" From a4f859c3f48682f75487ce8fca7d8530e3fae1fe Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Thu, 15 Feb 2024 21:27:38 +0100 Subject: [PATCH 041/101] Debug mode for topology generation (#1460) --- 
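NOTE (illustration only, not part of the committed patch): a minimal sketch of exercising the debug mode introduced below. Setting DEBUG_TOPOLOGY_GENERATION=1 forces a single reco node, keeps the temporary files (including the generated XML, which the script prints to stdout at the end) and points the EPN-specific directories to harmless defaults. All other mandatory environment variables checked by the script (FILEWORKDIR, INRAWCHANNAME, the multiplicity factors, ...) still have to be exported beforehand and are not shown here; the output file name is an arbitrary choice:

    # hypothetical local invocation of the topology generation in debug mode
    DEBUG_TOPOLOGY_GENERATION=1 ./DATA/tools/epn/gen_topo_o2dpg.sh > topology.xml
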
DATA/tools/epn/gen_topo.sh | 6 +++++- DATA/tools/epn/gen_topo_o2dpg.sh | 17 +++++++++++++++-- 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/DATA/tools/epn/gen_topo.sh b/DATA/tools/epn/gen_topo.sh index 973dd64b5..297dddd2f 100755 --- a/DATA/tools/epn/gen_topo.sh +++ b/DATA/tools/epn/gen_topo.sh @@ -18,7 +18,11 @@ if [[ -z "$EPN2EOS_METAFILES_DIR" ]] && [[ "0$WORKFLOWMODE" != "0print" ]]; then export EPN2EOS_METAFILES_DIR=/data/epn2eos_tool/epn2eos # Directory for epn2eos meta data files fi if [[ $USER == "epn" ]]; then - [[ -z "$GEN_TOPO_WORKDIR" ]] && export GEN_TOPO_WORKDIR=/scratch/services/gen_topo # Working directory for checkout of O2DPG repository and for XML cache. If this directory is wiped, gen_topo will recreate all necessary content the next time it runs. The folder should be persistent to cache workflows. + if [[ "${GEN_TOPO_DEPLOYMENT_TYPE:-}" == "ALICE_STAGING" ]]; then + [[ -z "$GEN_TOPO_WORKDIR" ]] && export GEN_TOPO_WORKDIR=/scratch/services/staging_gen_topo # Working directory for checkout of O2DPG repository and for XML cache. If this directory is wiped, gen_topo will recreate all necessary content the next time it runs. The folder should be persistent to cache workflows. + else + [[ -z "$GEN_TOPO_WORKDIR" ]] && export GEN_TOPO_WORKDIR=/scratch/services/gen_topo # Working directory for checkout of O2DPG repository and for XML cache. If this directory is wiped, gen_topo will recreate all necessary content the next time it runs. The folder should be persistent to cache workflows. + fi else [[ -z "$GEN_TOPO_WORKDIR" ]] && export GEN_TOPO_WORKDIR=$HOME/gen_topo # Working directory for checkout of O2DPG repository and for XML cache. If this directory is wiped, gen_topo will recreate all necessary content the next time it runs. The folder should be persistent to cache workflows. mkdir -p $HOME/gen_topo diff --git a/DATA/tools/epn/gen_topo_o2dpg.sh b/DATA/tools/epn/gen_topo_o2dpg.sh index 29346796d..3da6f701e 100755 --- a/DATA/tools/epn/gen_topo_o2dpg.sh +++ b/DATA/tools/epn/gen_topo_o2dpg.sh @@ -22,6 +22,15 @@ if [[ -z "$MULTIPLICITY_FACTOR_REST" ]]; then echo \$MULTIPLICITY_FACTOR_REST mi if [[ -z "$RECOSHMSIZE" ]]; then echo \$RECOSHMSIZE missing; exit 1; fi # SHM Size for reconstruction collections if [[ -z "$DDSHMSIZE" ]]; then echo \$DDSHMSIZE missing; exit 1; fi # SHM Size for DD +# In case of debug mode, overwrite some settings +if [[ "${DEBUG_TOPOLOGY_GENERATION:=0}" == "1" ]]; then + echo "Debugging mode enabled. Setting options accordingly" 1>&2 + RECO_NUM_NODES_OVERRIDE=1 # to avoid slurm query, specify number of nodes to fixed value + GEN_TOPO_MI100_NODES=1 # also for MI100 nodes + GEN_TOPO_OVERRIDE_TEMPDIR=$PWD # keep temporary files like QC jsons in local directory + EPN2EOS_METAFILES_DIR=/tmp # nothing is written here, just needs to be set to something +fi + # Check settings coming from the EPN if [[ -z "$FILEWORKDIR" ]]; then echo \$FILEWORKDIR missing; exit 1; fi if [[ -z "$INRAWCHANNAME" ]]; then echo \$INRAWCHANNAME missing; exit 1; fi @@ -103,6 +112,7 @@ while true; do break done + if [[ ! -z "$GEN_TOPO_ODC_EPN_TOPO_POST_CACHING_CMD" ]] && [[ "0$WORKFLOWMODE" != "0print" ]]; then TMP_POST_CACHING_CMD="$GEN_TOPO_ODC_EPN_TOPO_POST_CACHING_CMD $GEN_TOPO_ODC_EPN_TOPO_POST_CACHING_ARGS" TMP_POST_CACHING_NMIN=$(( $RECO_NUM_NODES_OVERRIDE > $RECO_MAX_FAIL_NODES_OVERRIDE ? $RECO_NUM_NODES_OVERRIDE - $RECO_MAX_FAIL_NODES_OVERRIDE : 0 )) @@ -126,6 +136,9 @@ if [[ ! 
-z "$ECS_ENVIRONMENT_ID" && -d "/var/log/topology/" && $USER == "epn" ]] fi cat $GEN_TOPO_WORKDIR/output.xml -echo Removing temporary output file $GEN_TOPO_WORKDIR/output.xml 1>&2 -rm $GEN_TOPO_WORKDIR/output.xml + +if [[ "$DEBUG_TOPOLOGY_GENERATION" == "0" ]]; then + echo Removing temporary output file $GEN_TOPO_WORKDIR/output.xml 1>&2 + rm $GEN_TOPO_WORKDIR/output.xml +fi rm -f $GEN_TOPO_LOCKFILE From bda1a376702e52ad891b2490aad7a52973f23024 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 16 Feb 2024 10:13:02 +0100 Subject: [PATCH 042/101] Revert "Update anchorMC.sh - added proc arg (#1462)" This reverts commit 843a10020b04fa8462ee73ecc371d74a97242f82. --- MC/run/ANCHOR/anchorMC.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index 438a790b8..b5d0d80fb 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -191,7 +191,7 @@ ALICEO2_CCDB_LOCALCACHE=${ALICEO2_CCDB_LOCALCACHE:-$(pwd)/ccdb} baseargs="-tf ${NTIMEFRAMES} --split-id ${SPLITID} --prod-split ${PRODSPLIT} --cycle ${CYCLE} --run-number ${ALIEN_JDL_LPMRUNNUMBER}" # these arguments will be passed as well but only evetually be digested by o2dpg_sim_workflow.py which is called from o2dpg_sim_workflow_anchored.py -remainingargs="-gen pythia8 -proc inel -seed ${SEED} -ns ${NSIGEVENTS} --include-local-qc --pregenCollContext" +remainingargs="-gen pythia8 -seed ${SEED} -ns ${NSIGEVENTS} --include-local-qc --pregenCollContext" remainingargs="${remainingargs} -e ${ALIEN_JDL_SIMENGINE} -j ${NWORKERS}" remainingargs="${remainingargs} -productionTag ${ALIEN_JDL_LPMPRODUCTIONTAG:-alibi_anchorTest_tmp}" remainingargs="${remainingargs} --anchor-config config-json.json" From e13c468a0dc52e06396dd8e4ac43337ad3c59f4d Mon Sep 17 00:00:00 2001 From: shahoian Date: Fri, 16 Feb 2024 17:17:42 +0100 Subject: [PATCH 043/101] adjust matching chi2 cut to 100 and max c14 diff to 2.5 --- DATA/production/configurations/asyncReco/setenv_extra.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index d178ca5db..4f9992a67 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -293,8 +293,8 @@ elif [[ $ALIGNLEVEL == 1 ]]; then ERROB="100e-8" [[ -z $TPCITSTIMEERR ]] && TPCITSTIMEERR="0.2" [[ -z $ITS_CONFIG || "$ITS_CONFIG" != *"--tracking-mode"* ]] && export ITS_CONFIG+=" --tracking-mode async" - CUT_MATCH_CHI2=80 - export ITSTPCMATCH="tpcitsMatch.safeMarginTimeCorrErr=2.;tpcitsMatch.XMatchingRef=60.;tpcitsMatch.cutMatchingChi2=$CUT_MATCH_CHI2;;tpcitsMatch.crudeAbsDiffCut[0]=6;tpcitsMatch.crudeAbsDiffCut[1]=6;tpcitsMatch.crudeAbsDiffCut[2]=0.3;tpcitsMatch.crudeAbsDiffCut[3]=0.3;tpcitsMatch.crudeAbsDiffCut[4]=1.5;tpcitsMatch.crudeNSigma2Cut[0]=64;tpcitsMatch.crudeNSigma2Cut[1]=64;tpcitsMatch.crudeNSigma2Cut[2]=64;tpcitsMatch.crudeNSigma2Cut[3]=64;tpcitsMatch.crudeNSigma2Cut[4]=64;" + CUT_MATCH_CHI2=100 + export 
ITSTPCMATCH="tpcitsMatch.safeMarginTimeCorrErr=2.;tpcitsMatch.XMatchingRef=60.;tpcitsMatch.cutMatchingChi2=$CUT_MATCH_CHI2;;tpcitsMatch.crudeAbsDiffCut[0]=6;tpcitsMatch.crudeAbsDiffCut[1]=6;tpcitsMatch.crudeAbsDiffCut[2]=0.3;tpcitsMatch.crudeAbsDiffCut[3]=0.3;tpcitsMatch.crudeAbsDiffCut[4]=2.5;tpcitsMatch.crudeNSigma2Cut[0]=64;tpcitsMatch.crudeNSigma2Cut[1]=64;tpcitsMatch.crudeNSigma2Cut[2]=64;tpcitsMatch.crudeNSigma2Cut[3]=64;tpcitsMatch.crudeNSigma2Cut[4]=64;" #-------------------------------------- TPC corrections ----------------------------------------------- # we need to provide to TPC From 598afb3d09044292eeecdbd95d67a93c384f10ca Mon Sep 17 00:00:00 2001 From: swenzel Date: Mon, 5 Feb 2024 16:46:31 +0100 Subject: [PATCH 044/101] Fix type error in pipeline runner --- MC/bin/o2_dpg_workflow_runner.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/MC/bin/o2_dpg_workflow_runner.py b/MC/bin/o2_dpg_workflow_runner.py index 44f56303a..b8cd05bc0 100755 --- a/MC/bin/o2_dpg_workflow_runner.py +++ b/MC/bin/o2_dpg_workflow_runner.py @@ -36,7 +36,7 @@ formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-f','--workflowfile', help='Input workflow file name', required=True) -parser.add_argument('-jmax','--maxjobs', help='Number of maximal parallel tasks.', default=100) +parser.add_argument('-jmax','--maxjobs', type=int, help='Number of maximal parallel tasks.', default=100) parser.add_argument('-k','--keep-going', action='store_true', help='Keep executing the pipeline as far possibe (not stopping on first failure)') parser.add_argument('--dry-run', action='store_true', help='Show what you would do.') parser.add_argument('--visualize-workflow', action='store_true', help='Saves a graph visualization of workflow.') @@ -1730,5 +1730,5 @@ def speedup_ROOT_Init(): exit(code) actionlogger.info("Running in cgroup") -executor=WorkflowExecutor(args.workflowfile,jmax=args.maxjobs,args=args) +executor=WorkflowExecutor(args.workflowfile,jmax=int(args.maxjobs),args=args) exit (executor.execute()) From e996e47d70442af1400dec323dbef7d1f6bf1aaf Mon Sep 17 00:00:00 2001 From: swenzel Date: Mon, 19 Feb 2024 09:18:36 +0100 Subject: [PATCH 045/101] pipeline_runner: Fix script creation following a recent change in class data layout introduced here: https://github.com/AliceO2Group/O2DPG/commit/ec4acee8f0d38616b6fa45809661213c9b938acc --- MC/bin/o2_dpg_workflow_runner.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/MC/bin/o2_dpg_workflow_runner.py b/MC/bin/o2_dpg_workflow_runner.py index b8cd05bc0..3c826476a 100755 --- a/MC/bin/o2_dpg_workflow_runner.py +++ b/MC/bin/o2_dpg_workflow_runner.py @@ -1488,8 +1488,8 @@ def produce_script(self, filename): # we record the global environment setting # in particular to capture global workflow initialization lines.append('#-- GLOBAL INIT SECTION FROM WORKFLOW --\n') - for e in self.globalenv: - lines.append('export ' + str(e) + '=' + str(self.globalenv[e]) + '\n') + for e in self.globalinit['env']: + lines.append('export ' + str(e) + '=' + str(self.globalinit['env'][e]) + '\n') lines.append('#-- TASKS FROM WORKFLOW --\n') for tid in taskorder: print ('Doing task ' + self.idtotask[tid]) From a883150b1806f5006add0b5523cabb25455df6ca Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Mon, 19 Feb 2024 10:15:06 +0100 Subject: [PATCH 046/101] [AnchorMC] Allow for any additional option for sim WF (#1463) Use as ALIEN_JDL_ANCHOR_SIM_OPTIONS="-ini -confKey "key=value;otherKey=otherValue" -trigger -productionTag 
myTag" Build the final remainingargs by prepending it. The last argument wins, hence in this case -productionTag would not be overwritten with this user choice Co-authored-by: Benedikt Volkel --- MC/bin/o2dpg_sim_workflow_anchored.py | 3 ++- MC/run/ANCHOR/anchorMC.sh | 10 +++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/MC/bin/o2dpg_sim_workflow_anchored.py b/MC/bin/o2dpg_sim_workflow_anchored.py index a50a0532a..9b859e9f0 100755 --- a/MC/bin/o2dpg_sim_workflow_anchored.py +++ b/MC/bin/o2dpg_sim_workflow_anchored.py @@ -409,7 +409,7 @@ def main(): effTrigger = 28.0 # this is ZDC else: effTrigger = 0.759 - + # time needs to be converted to seconds ==> timestamp / 1000 rate = retrieve_MinBias_CTPScaler_Rate(ctp_scalers, timestamp/1000., effTrigger, grplhcif.getBunchFilling().getNBunches(), ColSystem) @@ -424,6 +424,7 @@ def main(): # we finally pass forward to the unanchored MC workflow creation # TODO: this needs to be done in a pythonic way clearly + # NOTE: forwardargs can - in principle - contain some of the arguments that are appended here. However, the last passed argument wins, so they would be overwritten. forwardargs += " -tf " + str(args.tf) + " --sor " + str(sor) + " --timestamp " + str(timestamp) + " --production-offset " + str(prod_offset) + " -run " + str(args.run_number) + " --run-anchored --first-orbit " \ + str(first_orbit) + " -field ccdb -bcPatternFile ccdb" + " --orbitsPerTF " + str(GLOparams["OrbitsPerTF"]) + " -col " + str(ColSystem) + " -eCM " + str(eCM) + ' --readoutDets ' + GLOparams['detList'] print ("forward args ", forwardargs) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index b5d0d80fb..b5c0422c3 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -76,10 +76,12 @@ done # Allow for both "ALIEN_JDL_LPM" as well as "KEY" -# the only two where there is a real default for +# the only four where there is a real default for export ALIEN_JDL_CPULIMIT=${ALIEN_JDL_CPULIMIT:-${CPULIMIT:-8}} export ALIEN_JDL_SIMENGINE=${ALIEN_JDL_SIMENGINE:-${SIMENGINE:-TGeant4}} export ALIEN_JDL_WORKFLOWDETECTORS=${ALIEN_JDL_WORKFLOWDETECTORS:-ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP} +# can be passed to contain additional options that will be passed to o2dpg_sim_workflow_anchored.py and eventually to o2dpg_sim_workflow.py +export ALIEN_JDL_ANCHOR_SIM_OPTIONS=${ALIEN_JDL_ANCHOR_SIM_OPTIONS:--gen pythia8} # all others MUST be set by the user/on the outside export ALIEN_JDL_LPMANCHORPASSNAME=${ALIEN_JDL_LPMANCHORPASSNAME:-${ANCHORPASSNAME}} export ALIEN_JDL_MCANCHOR=${ALIEN_JDL_MCANCHOR:-${MCANCHOR}} @@ -191,10 +193,12 @@ ALICEO2_CCDB_LOCALCACHE=${ALICEO2_CCDB_LOCALCACHE:-$(pwd)/ccdb} baseargs="-tf ${NTIMEFRAMES} --split-id ${SPLITID} --prod-split ${PRODSPLIT} --cycle ${CYCLE} --run-number ${ALIEN_JDL_LPMRUNNUMBER}" # these arguments will be passed as well but only evetually be digested by o2dpg_sim_workflow.py which is called from o2dpg_sim_workflow_anchored.py -remainingargs="-gen pythia8 -seed ${SEED} -ns ${NSIGEVENTS} --include-local-qc --pregenCollContext" +remainingargs="-seed ${SEED} -ns ${NSIGEVENTS} --include-local-qc --pregenCollContext" remainingargs="${remainingargs} -e ${ALIEN_JDL_SIMENGINE} -j ${NWORKERS}" remainingargs="${remainingargs} -productionTag ${ALIEN_JDL_LPMPRODUCTIONTAG:-alibi_anchorTest_tmp}" -remainingargs="${remainingargs} --anchor-config config-json.json" +# prepend(!) ALIEN_JDL_ANCHOR_SIM_OPTIONS +# since the last passed argument wins, e.g. 
-productionTag cannot be overwritten by the user +remainingargs="${ALIEN_JDL_ANCHOR_SIM_OPTIONS} ${remainingargs} --anchor-config config-json.json" echo "baseargs: ${baseargs}" echo "remainingargs: ${remainingargs}" From f721817f954fa477b145175122188b42dede1cb4 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Mon, 19 Feb 2024 11:56:37 +0100 Subject: [PATCH 047/101] [SimCI] Do NOT run everything if anything changed in test directory (#1475) --- test/run_generator_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/run_generator_tests.sh b/test/run_generator_tests.sh index bb256ba78..34efd5443 100755 --- a/test/run_generator_tests.sh +++ b/test/run_generator_tests.sh @@ -378,7 +378,7 @@ pushd ${REPO_DIR} > /dev/null # First check, if testing itself has changed. In that case this will add INI files # for which a test can be found -global_testing_changed=$(get_changed_files | grep -E ".C$|.sh$" | grep "^test/") +global_testing_changed=$(get_changed_files | grep -E "common/kine_tests/test_generic_kine.C|run_generator_tests.sh" | grep "^test/") [[ "${global_testing_changed}" != "" ]] && add_ini_files_from_all_tests # Then add the ini files that have changed as well. We need to do that so we get information From 26319e42c27e7f5c15a17d0b351ebbfc1f18d403 Mon Sep 17 00:00:00 2001 From: David Rohr Date: Mon, 19 Feb 2024 10:09:28 +0100 Subject: [PATCH 048/101] dpl-workflow: Change default FLP processing in sync mode to CTP only --- DATA/common/setenv.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index f7342e553..b4e5152eb 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -134,7 +134,7 @@ else # Defaults when running on the EPN if [[ -z "${SHMTHROW:-}" ]]; then export SHMTHROW=0; fi if [[ -z "${TIMEFRAME_SHM_LIMIT:-}" ]]; then export TIMEFRAME_SHM_LIMIT=$(( $SHMSIZE / 2 )); fi if [[ -z "${EDJSONS_DIR:-}" ]]; then export EDJSONS_DIR="/scratch/services/ed/jsons_${RUNTYPE}"; fi - if [[ -z "${WORKFLOW_DETECTORS_FLP_PROCESSING+x}" ]]; then export WORKFLOW_DETECTORS_FLP_PROCESSING="TOF,CTP"; fi # Current default in sync processing is that FLP processing is only enabled for TOF + if [[ -z "${WORKFLOW_DETECTORS_FLP_PROCESSING+x}" ]]; then export WORKFLOW_DETECTORS_FLP_PROCESSING="CTP"; fi # Current default in sync processing is that FLP processing is only enabled for TOF if [[ -z "${GEN_TOPO_AUTOSCALE_PROCESSES:-}" ]]; then export GEN_TOPO_AUTOSCALE_PROCESSES=1; fi # On the EPN we should make sure to always use the node to the full extent fi # Some more options for running on the EPN From d3e61262e4ec8eaf2f4cedd077976f5d6f414704 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 20 Feb 2024 10:16:39 +0100 Subject: [PATCH 049/101] [SimWF] Small fixes (#1476) * o2dpg_sim_workflow_anchored.py * remove unused function from CCDBAccessor due to undefined utility function that is used inside it * fix variable name * o2dpg_sim_workflow.py * fix dependency of cleanup task * o2dpg-workflow-tools.py * use return value of function correctly Co-authored-by: Benedikt Volkel --- MC/bin/o2dpg-workflow-tools.py | 7 ++++--- MC/bin/o2dpg_sim_workflow.py | 6 +++--- MC/bin/o2dpg_sim_workflow_anchored.py | 12 +----------- 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/MC/bin/o2dpg-workflow-tools.py b/MC/bin/o2dpg-workflow-tools.py index 78b82294e..09bd4b115 100755 --- a/MC/bin/o2dpg-workflow-tools.py +++ b/MC/bin/o2dpg-workflow-tools.py @@ -15,15 +15,16 @@ def extend(args): is kept """ # load 
workflows - workflow_orig = read_workflow(args.orig_wf) - workflow_extend = read_workflow(args.extend_wf) + workflow_orig, meta = read_workflow(args.orig_wf) + workflow_extend, _ = read_workflow(args.extend_wf) # extend workflow_orig.extend(workflow_extend) # dump in new file filename = args.output if args.output else args.orig_wf - dump_workflow(workflow_orig, filename) + # propagate meta information from original workflow that is extended + dump_workflow(workflow_orig, filename, meta) def create(args): diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index 5b236027e..d0812d42c 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -1039,7 +1039,7 @@ def getDigiTaskName(det): TOFRECOtask['cmd'] = '${O2_ROOT}/bin/o2-tof-reco-workflow --use-ccdb ' + getDPL_global_options() + putConfigValuesNew() + ('',' --disable-mc')[args.no_mc_labels] workflow['stages'].append(TOFRECOtask) - + toftpcmatchneeds = [TOFRECOtask['name'], TPCRECOtask['name'], ITSTPCMATCHtask['name'], TRDTRACKINGtask2['name']] toftracksrcdefault = anchorConfig.get('o2-tof-matcher-workflow-options', {}).get('track-sources', 'TPC,ITS-TPC,TPC-TRD,ITS-TPC-TRD') TOFTPCMATCHERtask = createTask(name='toftpcmatch_'+str(tf), needs=toftpcmatchneeds, tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='1000') @@ -1407,10 +1407,10 @@ def addQCPerTF(taskName, needs, readerCommand, configFilePath, objectsFile=''): # taking away digits, clusters and other stuff as soon as possible. # TODO: cleanup by labels or task names if args.early_tf_cleanup == True: - TFcleanup = createTask(name='tfcleanup_'+str(tf), needs= [ AOD_merge_task['name'] ], tf=tf, cwd=timeframeworkdir, lab=["CLEANUP"], mem='0', cpu='1') + TFcleanup = createTask(name='tfcleanup_'+str(tf), needs= [ AODtask['name'] ], tf=tf, cwd=timeframeworkdir, lab=["CLEANUP"], mem='0', cpu='1') TFcleanup['cmd'] = 'rm *digi*.root;' TFcleanup['cmd'] += 'rm *cluster*.root' - workflow['stages'].append(TFcleanup); + workflow['stages'].append(TFcleanup) # AOD merging as one global final step aodmergerneeds = ['aod_' + str(tf) for tf in range(1, NTIMEFRAMES + 1)] diff --git a/MC/bin/o2dpg_sim_workflow_anchored.py b/MC/bin/o2dpg_sim_workflow_anchored.py index 9b859e9f0..9708e2bc3 100755 --- a/MC/bin/o2dpg_sim_workflow_anchored.py +++ b/MC/bin/o2dpg_sim_workflow_anchored.py @@ -50,16 +50,6 @@ def __init__(self, url): # we allow nullptr responsens and will treat it ourselves o2.ccdb.BasicCCDBManager.instance().setFatalWhenNull(False) - def list(self, path, dump_path=None): - ret = self.api.list(path, False, "application/json") - ret = json.loads(ret) - if ret and "objects" in ret: - ret = ret["objects"] - if ret and dump_path: - print(f"CCDB object information for path {path} stored in {dump_path}") - dump_json(ret, dump_path) - return ret - def fetch(self, path, obj_type, timestamp=None, meta_info=None): """ TODO We could use CcdbApi::snapshot at some point, needs revision @@ -95,7 +85,7 @@ def retrieve_sor_eor(ccdbreader, run_number): path_run_info = "RCT/Info/RunInformation" header = ccdbreader.fetch_header(path_run_info, run_number) if not header: - print(f"WARNING: Cannot find run information for run number {r}") + print(f"WARNING: Cannot find run information for run number {run_number}") return None # return this a dictionary return {"SOR": int(header["SOR"]), "EOR": int(header["EOR"])} From 2c5d4f3271f066dd0a7e0050b38ccf8ac855b460 Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 10:29:53 +0100 Subject: [PATCH 050/101] Fix syntax checks 
for shell scripts (#1473) --- DATA/common/gen_topo_helper_functions.sh | 14 +++++++------- DATA/common/setenv_calib.sh | 2 +- .../2022/LHC22f/apass1/setenv_extra.sh | 1 + .../configurations/asyncReco/async_pass.sh | 4 ++-- 4 files changed, 11 insertions(+), 10 deletions(-) diff --git a/DATA/common/gen_topo_helper_functions.sh b/DATA/common/gen_topo_helper_functions.sh index 00fc1f118..6efe2d7f8 100755 --- a/DATA/common/gen_topo_helper_functions.sh +++ b/DATA/common/gen_topo_helper_functions.sh @@ -104,37 +104,37 @@ _check_multiple() has_detectors() { - _check_multiple has_detector $@ + _check_multiple has_detector "$@" } has_detectors_qc() { - _check_multiple has_detector_qc $@ + _check_multiple has_detector_qc "$@" } has_detectors_calib() { - _check_multiple has_detector_calib $@ + _check_multiple has_detector_calib "$@" } has_detectors_reco() { - _check_multiple has_detector_reco $@ + _check_multiple has_detector_reco "$@" } has_detectors_ctf() { - _check_multiple has_detector_ctf $@ + _check_multiple has_detector_ctf "$@" } has_detectors_flp_processing() { - _check_multiple has_detector_flp_processing $@ + _check_multiple has_detector_flp_processing "$@" } workflow_has_parameters() { - _check_multiple workflow_has_parameter $@ + _check_multiple workflow_has_parameter "$@" } add_comma_separated() diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index a1d2e7692..0b44fe23c 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -14,7 +14,7 @@ SOURCE_GUARD_SETENV_CALIB=1 # define the conditions for each calibration if has_detector_calib ITS && has_detectors_reco ITS && has_detector_matching PRIMVTX && [[ ! -z "$VERTEXING_SOURCES" ]]; then CAN_DO_CALIB_PRIMVTX_MEANVTX=1; else CAN_DO_CALIB_PRIMVTX_MEANVTX=0; fi if has_detector_calib TOF && has_detector_reco TOF; then CAN_DO_CALIB_TOF_DIAGNOSTICS=1; CAN_DO_CALIB_TOF_INTEGRATEDCURR=1; else CAN_DO_CALIB_TOF_DIAGNOSTICS=0; CAN_DO_CALIB_TOF_INTEGRATEDCURR=0; fi -if has_detector_calib TOF && has_detector_reco TOF && (( has_detectors_reco ITS TPC && has_detector_matching ITSTPCTOF ) || ( has_detectors_reco ITS TPC TRD && has_detector_matching ITSTPCTRDTOF )); then CAN_DO_CALIB_TOF_LHCPHASE=1; CAN_DO_CALIB_TOF_CHANNELOFFSETS=1; else CAN_DO_CALIB_TOF_LHCPHASE=0; CAN_DO_CALIB_TOF_CHANNELOFFSETS=0; fi +if has_detector_calib TOF && has_detector_reco TOF && ( ( has_detectors_reco ITS TPC && has_detector_matching ITSTPCTOF ) || ( has_detectors_reco ITS TPC TRD && has_detector_matching ITSTPCTRDTOF ) ); then CAN_DO_CALIB_TOF_LHCPHASE=1; CAN_DO_CALIB_TOF_CHANNELOFFSETS=1; else CAN_DO_CALIB_TOF_LHCPHASE=0; CAN_DO_CALIB_TOF_CHANNELOFFSETS=0; fi if has_detector_calib TPC && has_detectors ITS TPC TOF TRD && has_detector_matching ITSTPCTRDTOF; then CAN_DO_CALIB_TPC_SCDCALIB=1; else CAN_DO_CALIB_TPC_SCDCALIB=0; fi if has_detector_calib TPC && has_processing_step TPC_DEDX; then CAN_DO_CALIB_TPC_TIMEGAIN=1; CAN_DO_CALIB_TPC_RESPADGAIN=1; else CAN_DO_CALIB_TPC_TIMEGAIN=0; CAN_DO_CALIB_TPC_RESPADGAIN=0; fi if has_detector_calib TPC && has_detectors ITS TPC && has_detector_matching ITSTPC; then CAN_DO_CALIB_TPC_VDRIFTTGL=1; else CAN_DO_CALIB_TPC_VDRIFTTGL=0; fi diff --git a/DATA/production/configurations/2022/LHC22f/apass1/setenv_extra.sh b/DATA/production/configurations/2022/LHC22f/apass1/setenv_extra.sh index 648e9a61b..e814f833a 100644 --- a/DATA/production/configurations/2022/LHC22f/apass1/setenv_extra.sh +++ b/DATA/production/configurations/2022/LHC22f/apass1/setenv_extra.sh @@ -72,6 +72,7 @@ fi CTP_BC_SHIFT=0 if [[ 
$ALIEN_JDL_LPMANCHORYEAR == "2022" ]]; then CTP_BC_SHIFT=-294 +fi if [[ $RUNNUMBER -ge 538923 ]] && [[ $RUNNUMBER -le 539700 ]]; then # 3 BC offset (future direction) in CTP data observed for LHC23zd - LHC23zs CTP_BC_SHIFT=-3 diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index 05ed0dea2..9d1f49025 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -576,7 +576,7 @@ else echo "nCTFsFilesInspected_step1 = $nCTFsFilesInspected_step1, nCTFsFilesInspected_step2 = $nCTFsFilesInspected_step2" > validation_error.message echo "nCTFsFilesOK_step1 = $nCTFsFilesOK_step1, nCTFsFilesOK_step2 = $nCTFsFilesOK_step2" > validation_error.message echo "nCTFsProcessed_step1 = $nCTFsProcessed_step1, nCTFsProcessed_step2 = $nCTFsProcessed_step2" > validation_error.message - exit 1000 + exit 255 fi fi fi @@ -745,7 +745,7 @@ if [[ $ALIEN_JDL_AODOFF != 1 ]]; then CURRENT_POOL_SIZE=`jobs -r | wc -l` done < $JOB_LIST # collecting return codes of the merging processes - for i in ${!arr[@]}; do + for i in "${!arr[@]}"; do wait ${arr[$i]} exitcode=$? if [[ $exitcode -ne 0 ]]; then From 5452aba0aaaf8683defeb19a39e334bab8adcdce Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 10:30:16 +0100 Subject: [PATCH 051/101] TPC SCD calib send track data by default (#1474) --- DATA/common/setenv_calib.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index 0b44fe23c..bdb836509 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -36,8 +36,9 @@ if [[ $SYNCMODE != 1 ]] && has_detector_reco TPC && has_detector_reco ITS && has # additional individual settings for calibration workflows if has_detector CTP; then export CALIB_TPC_SCDCALIB_CTP_INPUT="--enable-ctp"; else export CALIB_TPC_SCDCALIB_CTP_INPUT=""; fi if [[ ${DISABLE_TRD_PH:-} == 1 ]]; then CAN_DO_CALIB_TRD_T0=0; fi -# the slot length needs to be known both on the aggregator and the processing nodes, therefore it is defined (in seconds!) here -: ${CALIB_TPC_SCDCALIB_SLOTLENGTH:=600} + +: ${CALIB_TPC_SCDCALIB_SLOTLENGTH:=600} # the slot length needs to be known both on the aggregator and the processing nodes, therefore it is defined (in seconds!) here +: ${CALIB_TPC_SCDCALIB_SENDTRKDATA:=1} # by default, we want to write the track information in addition to unbinned residuals to allow finer filtering offline if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then From d24070e8be0a4e7180825f40402eee5f21e45e53 Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 10:31:26 +0100 Subject: [PATCH 052/101] For synthetic runs we set IS_SIMULATED_DATA=1 (#1477) --- DATA/common/setenv.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index b4e5152eb..74ae5640e 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -104,6 +104,7 @@ if [[ -z "${RAWINPUTDIR:-}" ]]; then export RAWINPUTDIR=$FILEWORKDIR; fi # if [[ -z "${EPNSYNCMODE:-}" ]]; then export EPNSYNCMODE=0; fi # Is this workflow supposed to run on EPN for sync processing? Will enable InfoLogger / metrics / fetching QC JSONs from consul... 
if [[ -z "${BEAMTYPE:-}" ]]; then export BEAMTYPE=PbPb; fi # Beam type, must be PbPb, pp, pPb, cosmic, technical if [[ -z "${RUNTYPE:-}" ]]; then export RUNTYPE=Standalone; fi # Run Type, standalone for local tests, otherwise PHYSICS, COSMICS, TECHNICAL, SYNTHETIC +if [[ $RUNTYPE == "SYNTHETIC" ]]; then export IS_SIMULATED_DATA=1; fi # For SYNTHETIC runs we always process simulated data if [[ -z "${IS_SIMULATED_DATA:-}" ]]; then export IS_SIMULATED_DATA=1; fi # processing simulated data if [[ -z "${IS_TRIGGERED_DATA:-}" ]]; then export IS_TRIGGERED_DATA=0; fi # processing triggered data (TPC triggered instead of continuous) if [[ -z "${CTF_DIR:-}" ]]; then CTF_DIR=$FILEWORKDIR; fi # Directory where to store CTFs From 7f48808ee5ee611abfbe0f01b363cf1954ca22c2 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 20 Feb 2024 10:40:41 +0100 Subject: [PATCH 053/101] Add tool to fetch PR information based on assigned labels (#1478) * applicable to different repos * distinguishes between merged and other (simply closed or still open) PRs * dumps the output into a simple text file for further proessing Co-authored-by: Benedikt Volkel --- UTILS/o2dpg_make_github_pr_report.py | 141 +++++++++++++++++++++++++++ 1 file changed, 141 insertions(+) create mode 100755 UTILS/o2dpg_make_github_pr_report.py diff --git a/UTILS/o2dpg_make_github_pr_report.py b/UTILS/o2dpg_make_github_pr_report.py new file mode 100755 index 000000000..b6a4ac9c9 --- /dev/null +++ b/UTILS/o2dpg_make_github_pr_report.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python3 + +# Get list of PRs from provided repo that have a certain label assigned +# Can be used to figure out which PRs should be ported + +import sys +import argparse +import requests + + +def organise_prs(prs): + """ + Sort PRs by time merged, starting from old to recent + """ + # collect merged PRs + prs_merged = [] + # collect the time of merged PRs + merged_at = [] + # other PRs, open, closed and not merged + prs_other = [] + + for pr in prs: + if not pr['merged_at']: + # that has not been merged + prs_other.append(pr) + continue + # get the PR itself and the merged timestamp + prs_merged.append(pr) + merged_at.append(pr['merged_at']) + + # sort the merged PRs by their merged timestamp + prs_merged = [pr for _, pr in sorted(zip(merged_at, prs))] + + return prs_merged, prs_other + + +def get_prs(owner, repo, prod_label, pr_state, include_unmerged, per_page=50, start_page=1, pages=1): + """ + Get PRs according to some selection + """ + # GitHub API endpoint for listing closed pull requests with a specific label + merged_token = '&is:merged=true' if not include_unmerged else '' + prs_return = [] + + has_error = False + for page in range(start_page, pages + 1): + url = f'https://api.github.com/repos/{owner}/{repo}/pulls?state={pr_state}{merged_token}&page={page}&per_page={per_page}' + print(f'Fetch PRs accrodring to {url}') + + # Send GET request to GitHub API + response = requests.get(url) + + # Check if the request was successful (status code 200) + if response.status_code == 200: + # Parse JSON response + prs = response.json() + # PRs to return because we filter on a specific label + for pr in prs: + labels = pr['labels'] + accept = False + for label in labels: + if label['name'] == prod_label: + # only with the correct the label will be accepted + accept = True + break + if not accept: + continue + # we will end up here if accepted, so append + prs_return.append(pr) + + else: + print(f'Failed to retrieve data: {response.status_code} - {response.text}') + has_error = True 
+ break + + if has_error: + return None, None + + # organise PRs into different lists (merged and others) + return organise_prs(prs_return) + + +def make_report(prs_merged, prs_other, outfile): + """ + Make a report + + simply dump into text file + """ + + with open(outfile, 'w') as f: + f.write('# FROM OLDEST TO RECENT\n') + # our common header + f.write('| Date of next tag | Requestor | Package | PR | Data or MC | Comment | JIRA (if it exists) | Accepted | In production | Validated by requestor |\n') + f.write('| ---------------- | ------------ | ------- | --------------------------------------------------------:|:--------------------------------------------- | ------------------- | ---------------- | ------------- |-------------| ------------------|\n') + + # first put the merged PRs + for pr in prs_merged: + mc_data = [] + + for label in pr['labels']: + if label['name'] in ('MC', 'DATA'): + # get assigned MC or DATA label if this PR has it + mc_data.append(label['name']) + + # if no specific MC or DATA label, assume valid for both + mc_data = ','.join(mc_data) if mc_data else 'MC,DATA' + # add the full line to the output file + f.write(f'| {args.date} | {pr["user"]["login"]} | {args.repo} | [PR]({pr["html_url"]}) | {mc_data} | {pr["title"]} | | | | |\n') + + # add all the other commits + f.write('OTHER PRs\n') + for pr in prs_other: + f.write(f'| {args.date} | {pr["user"]["login"]} | {args.repo} | [PR]({pr["html_url"]}) | | {pr["title"]} | | | | |\n') + + +if __name__ == '__main__': + # Parse command-line arguments + parser = argparse.ArgumentParser(description='Retrieve closed pull requests with a specific label from a GitHub repository') + parser.add_argument('--owner', help='GitHub repository owner', default='AliceO2Group') + parser.add_argument('--repo', required=True, help='GitHub repository name, e.g. 
O2DPG or AliceO2') + parser.add_argument('--prod-label', dest='prod_label', required=True, help='Production label to filter PRs') + parser.add_argument('--pr-state', dest='pr_state', default='closed', help='The state of the PR') + parser.add_argument('--include-unmerged', dest='include_unmerged', action='store_true', help='To fetch also unmerged PRs') + parser.add_argument('--output', default='o2dpg_pr_report.txt') + parser.add_argument('--date', help='The date tag to be put', required=True) + parser.add_argument('--per-page', dest='per_page', default=50, help='How many results per page') + parser.add_argument('--start-page', dest='start_page', type=int, default=1, help='Start on this page') + parser.add_argument('--pages', type=int, default=1, help='Number of pages') + + + args = parser.parse_args() + + # Retrieve closed pull requests with the specified label + prs_merged, prs_other = get_prs(args.owner, args.repo, args.prod_label, args.pr_state, args.include_unmerged, args.per_page, args.start_page, args.pages) + if prs_merged is None: + print('ERROR: There was a problem fetching the info.') + sys.exit(1) + + make_report(prs_merged, prs_other, args.output) + + sys.exit(0) From ed7cc8640f26e1098f5ac1e467b57806400ba59c Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 20 Feb 2024 11:22:06 +0100 Subject: [PATCH 054/101] [Anchor] Add test for anchored MC (#1464) Runs a very simple anchored production, 2TFs, 50 pp events each Co-authored-by: Benedikt Volkel --- MC/run/ANCHOR/anchorMC.sh | 2 + .../tests/test_anchor_2023_apass2_PbPb.sh | 36 ++++++++++ .../tests/test_anchor_2023_apass2_pp.sh | 39 +++++++++++ test/common/utils/utils.sh | 2 +- test/run_workflow_tests.sh | 67 +++++++++++++++++-- 5 files changed, 139 insertions(+), 7 deletions(-) create mode 100755 MC/run/ANCHOR/tests/test_anchor_2023_apass2_PbPb.sh create mode 100755 MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index b5c0422c3..2bab586de 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -241,6 +241,8 @@ if [[ -z "${DISABLE_QC}" && "${MCRC}" = "0" && "${remainingargs}" == *"--include # do QC tasks echo "Doing QC" ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels QC --cpu-limit ${ALIEN_JDL_CPULIMIT:-8} -k + # NOTE that with the -k|--keep-going option, the runner will try to keep on executing even if some tasks fail. + # That means, even if there is a failing QC task, the return code will be 0 MCRC=$? 
fi diff --git a/MC/run/ANCHOR/tests/test_anchor_2023_apass2_PbPb.sh b/MC/run/ANCHOR/tests/test_anchor_2023_apass2_PbPb.sh new file mode 100755 index 000000000..095908e4b --- /dev/null +++ b/MC/run/ANCHOR/tests/test_anchor_2023_apass2_PbPb.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# +# An example steering script for anchored MC simulations, PbPb +# + +# example anchoring +# taken from https://its.cern.ch/jira/browse/O2-4586 +export ALIEN_JDL_LPMANCHORPASSNAME=apass2 +export ALIEN_JDL_MCANCHOR=apass2 +export ALIEN_JDL_COLLISIONSYSTEM=Pb-Pb +export ALIEN_JDL_CPULIMIT=8 +export ALIEN_JDL_LPMPASSNAME=apass2 +export ALIEN_JDL_LPMRUNNUMBER=544121 +export ALIEN_JDL_LPMPRODUCTIONTYPE=MC +export ALIEN_JDL_LPMINTERACTIONTYPE=PbPb +export ALIEN_JDL_LPMPRODUCTIONTAG=LHC24a1 +export ALIEN_JDL_LPMANCHORRUN=544121 +export ALIEN_JDL_LPMANCHORPRODUCTION=LHC23zzh +export ALIEN_JDL_LPMANCHORYEAR=2023 + +export NTIMEFRAMES=2 +export NSIGEVENTS=2 +export SPLITID=100 +export PRODSPLIT=153 +export CYCLE=0 + +# on the GRID, this is set, for our use case, we can mimic any job ID +export ALIEN_PROC_ID=2963436952 + +# run the central anchor steering script; this includes +# * derive timestamp +# * derive interaction rate +# * extract and prepare configurations (which detectors are contained in the run etc.) +# * run the simulation (and QC) +${O2DPG_ROOT}/MC/run/ANCHOR/anchorMC.sh diff --git a/MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh b/MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh new file mode 100755 index 000000000..e528b8a0a --- /dev/null +++ b/MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +# +# An example steering script for anchored MC simulations, pp +# + +# example anchoring +# taken from https://its.cern.ch/jira/browse/O2-4586 +export ALIEN_JDL_LPMANCHORPASSNAME=apass2 +export ALIEN_JDL_MCANCHOR=apass2 +export ALIEN_JDL_COLLISIONSYSTEM=p-p +export ALIEN_JDL_CPULIMIT=8 +export ALIEN_JDL_LPMPASSNAME=apass2 +export ALIEN_JDL_LPMRUNNUMBER=535069 +export ALIEN_JDL_LPMPRODUCTIONTYPE=MC +export ALIEN_JDL_LPMINTERACTIONTYPE=pp +export ALIEN_JDL_LPMPRODUCTIONTAG=LHC24a2 +export ALIEN_JDL_LPMANCHORRUN=535069 +export ALIEN_JDL_LPMANCHORPRODUCTION=LHC23f +export ALIEN_JDL_LPMANCHORYEAR=2023 + +export NTIMEFRAMES=2 +export NSIGEVENTS=50 +export SPLITID=100 +export PRODSPLIT=153 +export CYCLE=0 + +# on the GRID, this is set, for our use case, we can mimic any job ID +export ALIEN_PROC_ID=2963436952 + +# for pp and 50 events per TF, we launch only 4 workers. +export NWORKERS=4 + +# run the central anchor steering script; this includes +# * derive timestamp +# * derive interaction rate +# * extract and prepare configurations (which detectors are contained in the run etc.) 
+# * run the simulation (and QC) +${O2DPG_ROOT}/MC/run/ANCHOR/anchorMC.sh diff --git a/test/common/utils/utils.sh b/test/common/utils/utils.sh index e2cae2171..0c34c5395 100644 --- a/test/common/utils/utils.sh +++ b/test/common/utils/utils.sh @@ -78,7 +78,7 @@ make_wf_creation_script() print_error_logs() { local search_dir=${1} - local search_pattern="TASK-EXIT-CODE: ([1-9][0-9]*)|[Ss]egmentation violation|[Ee]xception caught|\[FATAL\]|uncaught exception|\(int\) ([1-9][0-9]*)|fair::FatalException" + local search_pattern="TASK-EXIT-CODE: ([1-9][0-9]*)|[Ss]egmentation violation|[Ss]egmentation fault|Program crashed|[Ee]xception caught|\[FATAL\]|uncaught exception|\(int\) ([1-9][0-9]*)|fair::FatalException" local error_files=$(find ${search_dir} -maxdepth 4 -type f \( -name "*.log" -or -name "*serverlog*" -or -name "*workerlog*" -or -name "*mergerlog*" \) | xargs grep -l -E "${search_pattern}" | sort) for ef in ${error_files} ; do echo_red "Error found in log $(realpath ${ef})" diff --git a/test/run_workflow_tests.sh b/test/run_workflow_tests.sh index c4a2daeaa..adc610393 100755 --- a/test/run_workflow_tests.sh +++ b/test/run_workflow_tests.sh @@ -3,12 +3,14 @@ # The test parent dir to be cretaed in current directory TEST_PARENT_DIR_PWG="o2dpg_tests/workflows_pwgs" TEST_PARENT_DIR_BIN="o2dpg_tests/workflows_bin" +TEST_PARENT_DIR_ANCHORED="o2dpg_tests/anchored" # a global counter for tests TEST_COUNTER=0 # unified names of log files LOG_FILE_WF="o2dpg-test-wf.log" +LOG_FILE_ANCHORED="o2dpg-test-anchored.log" # Prepare some colored output SRED="\033[0;31m" @@ -123,6 +125,26 @@ run_workflow_creation() return ${RET} } +test_anchored() +{ + local to_run="${1:-${O2DPG_ROOT}/MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh}" + local RET=0 + for anchored_script in ${to_run} ; do + [[ ! -f ${anchored_script} ]] && { echo "Desired test script ${anchored_script} does not exist. Skip." 
; continue ; } + ((TEST_COUNTER++)) + local test_dir=${TEST_COUNTER}_$(basename ${anchored_script})_dir + rm -rf ${test_dir} 2> /dev/null + mkdir ${test_dir} + pushd ${test_dir} > /dev/null + echo -n "Test ${TEST_COUNTER}: ${anchored_script}" + ${anchored_script} >> ${LOG_FILE_ANCHORED} 2>&1 + local ret_this=${?} + [[ "${ret_this}" != "0" ]] && RET=${ret_this} + popd > /dev/null + done + return ${RET} +} + collect_changed_pwg_wf_files() { # Collect all INI files which have changed @@ -188,8 +210,9 @@ source ${REPO_DIR}/test/common/utils/utils.sh pushd ${REPO_DIR} > /dev/null # flag if anything changed in the sim workflow bin dir -changed_wf_bin=$(get_changed_files | grep "MC/bin") +changed_wf_bin=$(get_changed_files | grep -E "MC/bin") changed_wf_bin_related=$(get_changed_files | grep -E "MC/analysis_testing|MC/config/analysis_testing/json|MC/config/QC/json") +changed_anchored_related=$(get_changed_files | grep -E "MC/run/ANCHOR/anchorMC.sh|MC/run/ANCHOR/tests|MC/bin|UTILS/parse-async-WorkflowConfig.py") # collect what has changed for PWGs @@ -215,6 +238,27 @@ REPO_DIR=$(realpath ${REPO_DIR}) export O2DPG_ROOT=${REPO_DIR} +############### +# ANCHORED MC # +############### +# prepare our local test directory for PWG tests +rm -rf ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null +mkdir -p ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null +pushd ${TEST_PARENT_DIR_ANCHORED} > /dev/null + +# global return code for PWGs +ret_global_anchored=0 +if [[ "${changed_anchored_related}" != "" ]] ; then + echo "### Test anchored ###" + # Run an anchored test + test_anchored + ret_global_anchored=${?} + echo +fi + +# return to where we came from +popd > /dev/null + ######## # PWGs # ######## @@ -226,7 +270,7 @@ pushd ${TEST_PARENT_DIR_PWG} > /dev/null # global return code for PWGs ret_global_pwg=0 if [[ "${changed_wf_bin}" != "" ]] ; then - # Run all the PWG related WF creations, hence overwrite what was collected by collect_changed_pwg_wf_files eal=rlier + # Run all the PWG related WF creations, hence overwrite what was collected by collect_changed_pwg_wf_files earlier WF_FILES=$(get_all_workflows "MC/run/.*/") echo fi @@ -240,7 +284,6 @@ if [[ "${WF_FILES}" != "" ]] ; then echo fi - # return to where we came from popd > /dev/null @@ -285,9 +328,21 @@ if [[ "${ret_global_bin}" != "0" ]] ; then echo "###################################" echo print_error_logs ${TEST_PARENT_DIR_BIN} - exit ${ret_global_bin} fi +# However, if a central test fails, exit code will be !=0 +if [[ "${ret_global_anchored}" != "0" ]] ; then + echo + echo "##########################" + echo "# ERROR for anchored MCs #" + echo "##########################" + echo + print_error_logs ${TEST_PARENT_DIR_ANCHORED} +fi + +RET=$(( ret_global_bin + ret_global_anchored )) + echo -echo_green "All required workflow tests successful" -echo +[[ "${RET}" != "0" ]] && echo "There were errors, please check!" 
|| echo_green "All required workflow tests successful" + +exit ${RET} From 070a0f30bf57a53b44ace67876afd7bbbe2de915 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 20 Feb 2024 12:32:08 +0100 Subject: [PATCH 055/101] [SimCI] Change $@ to $* (#1480) Co-authored-by: Benedikt Volkel --- test/run_analysisqc_tests.sh | 4 ++-- test/run_generator_tests.sh | 4 ++-- test/run_relval_tests.sh | 4 ++-- test/run_workflow_tests.sh | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/test/run_analysisqc_tests.sh b/test/run_analysisqc_tests.sh index fab8dd083..bd57493fd 100755 --- a/test/run_analysisqc_tests.sh +++ b/test/run_analysisqc_tests.sh @@ -14,13 +14,13 @@ SEND="\033[0m" echo_green() { - echo -e "${SGREEN}$@${SEND}" + echo -e "${SGREEN}${*}${SEND}" } echo_red() { - echo -e "${SRED}$@${SEND}" + echo -e "${SRED}${*}${SEND}" } diff --git a/test/run_generator_tests.sh b/test/run_generator_tests.sh index 34efd5443..d5a4d3c74 100755 --- a/test/run_generator_tests.sh +++ b/test/run_generator_tests.sh @@ -37,13 +37,13 @@ SEND="\033[0m" echo_green() { - echo -e "${SGREEN}$@${SEND}" + echo -e "${SGREEN}${*}${SEND}" } echo_red() { - echo -e "${SRED}$@${SEND}" + echo -e "${SRED}${*}${SEND}" } diff --git a/test/run_relval_tests.sh b/test/run_relval_tests.sh index 6294e1361..a14c3e0ed 100755 --- a/test/run_relval_tests.sh +++ b/test/run_relval_tests.sh @@ -14,13 +14,13 @@ SEND="\033[0m" echo_green() { - echo -e "${SGREEN}$@${SEND}" + echo -e "${SGREEN}${*}${SEND}" } echo_red() { - echo -e "${SRED}$@${SEND}" + echo -e "${SRED}${*}${SEND}" } diff --git a/test/run_workflow_tests.sh b/test/run_workflow_tests.sh index adc610393..e96b07141 100755 --- a/test/run_workflow_tests.sh +++ b/test/run_workflow_tests.sh @@ -20,13 +20,13 @@ SEND="\033[0m" echo_green() { - echo -e "${SGREEN}$@${SEND}" + echo -e "${SGREEN}${*}${SEND}" } echo_red() { - echo -e "${SRED}$@${SEND}" + echo -e "${SRED}${*}${SEND}" } get_git_repo_directory() From eb41cb7c60d1a1cf5544e4e321dbb298a7ae19ea Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 20 Feb 2024 12:40:52 +0100 Subject: [PATCH 056/101] [RelVa] Fix imported function name (#1481) Co-authored-by: Benedikt Volkel --- RelVal/o2dpg_overlay_plots.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/RelVal/o2dpg_overlay_plots.py b/RelVal/o2dpg_overlay_plots.py index 9f9a420b7..12682ca0b 100755 --- a/RelVal/o2dpg_overlay_plots.py +++ b/RelVal/o2dpg_overlay_plots.py @@ -17,7 +17,7 @@ o2dpg_release_validation = importlib.util.module_from_spec(spec) spec.loader.exec_module(o2dpg_release_validation) sys.modules["o2dpg_release_validation"] = o2dpg_release_validation -from o2dpg_release_validation import only_extract_impl +from o2dpg_release_validation import extract_and_flatten spec = importlib.util.spec_from_file_location("o2dpg_release_validation_plot", join(O2DPG_ROOT, "RelVal", "utils", 'o2dpg_release_validation_plot.py')) o2dpg_release_validation_plot = importlib.util.module_from_spec(spec) @@ -39,7 +39,7 @@ def run(args): ref_file = None for i, (input_file, label) in enumerate(zip(args.inputs, args.labels)): - _, config = only_extract_impl(input_file, args.output, label, prefix=i, reference_extracted=ref_file) + _, config = extract_and_flatten(input_file, args.output, label, prefix=i, reference_extracted=ref_file) if not config: print(f"ERROR: Problem with input file {input_file}, cannot extract") return 1 From 27b9a48382e6cd1ed4597d281fde7dcf09d82938 Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 14:18:46 +0100 
Subject: [PATCH 057/101] More bash syntax check fixes (#1479) --- DATA/production/calib/mch-badchannel-processing.sh | 1 - DATA/production/calib/tpc-pulser-long.sh | 1 - DATA/production/calib/tpc-pulser.sh | 1 - .../configurations/2021/ctf_recreation/ctf_recreation.sh | 2 +- .../configurations/2022/LHC22f/apass1/async_pass.sh | 4 ++-- 5 files changed, 3 insertions(+), 6 deletions(-) diff --git a/DATA/production/calib/mch-badchannel-processing.sh b/DATA/production/calib/mch-badchannel-processing.sh index 771aeab87..738020262 100755 --- a/DATA/production/calib/mch-badchannel-processing.sh +++ b/DATA/production/calib/mch-badchannel-processing.sh @@ -1,4 +1,3 @@ - #!/bin/bash source common/setenv.sh diff --git a/DATA/production/calib/tpc-pulser-long.sh b/DATA/production/calib/tpc-pulser-long.sh index e1aaab4d4..e7124fd28 100755 --- a/DATA/production/calib/tpc-pulser-long.sh +++ b/DATA/production/calib/tpc-pulser-long.sh @@ -1,4 +1,3 @@ - #!/usr/bin/env bash source common/setenv.sh diff --git a/DATA/production/calib/tpc-pulser.sh b/DATA/production/calib/tpc-pulser.sh index 6b68a030b..6b62e559f 100755 --- a/DATA/production/calib/tpc-pulser.sh +++ b/DATA/production/calib/tpc-pulser.sh @@ -1,4 +1,3 @@ - #!/usr/bin/env bash source common/setenv.sh diff --git a/DATA/production/configurations/2021/ctf_recreation/ctf_recreation.sh b/DATA/production/configurations/2021/ctf_recreation/ctf_recreation.sh index bbf361c95..be5085e9f 100755 --- a/DATA/production/configurations/2021/ctf_recreation/ctf_recreation.sh +++ b/DATA/production/configurations/2021/ctf_recreation/ctf_recreation.sh @@ -72,7 +72,7 @@ fi # if "$DETCONFIG" is set explicitly, this has the second highest priority # last option is to have it from the JDL if [[ -z "$DETCONFIG" ]]; then - if [[ -z "ALIEN_JDL_DETCONFIG" ]]; then + if [[ -z "$ALIEN_JDL_DETCONFIG" ]]; then echo "nothing set the detector configuration to use, exiting" exit 4 else diff --git a/DATA/production/configurations/2022/LHC22f/apass1/async_pass.sh b/DATA/production/configurations/2022/LHC22f/apass1/async_pass.sh index 3dc46a02c..ce764852f 100755 --- a/DATA/production/configurations/2022/LHC22f/apass1/async_pass.sh +++ b/DATA/production/configurations/2022/LHC22f/apass1/async_pass.sh @@ -504,7 +504,7 @@ else echo "nCTFsFilesInspected_step1 = $nCTFsFilesInspected_step1, nCTFsFilesInspected_step2 = $nCTFsFilesInspected_step2" > validation_error.message echo "nCTFsFilesOK_step1 = $nCTFsFilesOK_step1, nCTFsFilesOK_step2 = $nCTFsFilesOK_step2" > validation_error.message echo "nCTFsProcessed_step1 = $nCTFsProcessed_step1, nCTFsProcessed_step2 = $nCTFsProcessed_step2" > validation_error.message - exit 1000 + exit 255 fi fi fi @@ -663,7 +663,7 @@ if [[ $ALIEN_JDL_AODOFF != 1 ]]; then CURRENT_POOL_SIZE=`jobs -r | wc -l` done < $JOB_LIST # collecting return codes of the merging processes - for i in ${!arr[@]}; do + for i in "${!arr[@]}"; do wait ${arr[$i]} exitcode=$? if [[ $exitcode -ne 0 ]]; then From 6d2d239238e99971d9856682d97e5f71fea7bcfb Mon Sep 17 00:00:00 2001 From: Timo Wilken Date: Tue, 20 Feb 2024 14:22:18 +0100 Subject: [PATCH 058/101] Fix Bash syntax issues found by shellcheck (#1482) Fix issues found in https://github.com/AliceO2Group/O2DPG/pull/1469, in addition to those fixed in https://github.com/AliceO2Group/O2DPG/pull/1479. 
--- GRID/utils/extractErroredLogFiles.sh | 4 ++-- GRID/utils/grid_submit.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/GRID/utils/extractErroredLogFiles.sh b/GRID/utils/extractErroredLogFiles.sh index 81c21839d..20a0750ba 100755 --- a/GRID/utils/extractErroredLogFiles.sh +++ b/GRID/utils/extractErroredLogFiles.sh @@ -4,13 +4,13 @@ # Beware that errors might occur outside of O2DPG tasks such as in preprocessing etc or not visible in logs mytar () { - tar $@ + tar "$@" } if [[ $(uname) == "Darwin" ]]; then echo "Running on macOS. This needs gnu-tar" $(which gtar) mytar () { - gtar $@ + gtar "$@" } fi diff --git a/GRID/utils/grid_submit.sh b/GRID/utils/grid_submit.sh index 7913f83ad..f364bed68 100755 --- a/GRID/utils/grid_submit.sh +++ b/GRID/utils/grid_submit.sh @@ -401,7 +401,7 @@ EOF spin[1]="|" spin[0]="\\" JOBSTATUS="I" - if [ "{WAITFORALIEN}" ]; then + if [ "${WAITFORALIEN}" ]; then echo -n "Waiting for jobs to return ... Last status : ${spin[0]} ${JOBSTATUS}" fi counter=0 From ba2ab9450929a2c14706ca87b3235fde3a285a22 Mon Sep 17 00:00:00 2001 From: Timo Wilken Date: Tue, 20 Feb 2024 16:54:14 +0100 Subject: [PATCH 059/101] Add syntax checkers for Bash and Python (#1469) --- .github/workflows/check-json-syntax.yml | 37 ---------- .github/workflows/syntax-checks.yml | 95 +++++++++++++++++++++++++ 2 files changed, 95 insertions(+), 37 deletions(-) delete mode 100644 .github/workflows/check-json-syntax.yml create mode 100644 .github/workflows/syntax-checks.yml diff --git a/.github/workflows/check-json-syntax.yml b/.github/workflows/check-json-syntax.yml deleted file mode 100644 index fae51ae62..000000000 --- a/.github/workflows/check-json-syntax.yml +++ /dev/null @@ -1,37 +0,0 @@ ---- -name: Validate JSON syntax - -# Run on any commit or PR that changes any JSON file. -'on': - push: - paths: - - '**.json' - pull_request: - paths: - - '**.json' - -permissions: {} - -jobs: - json-syntax: - name: validate syntax - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Validate syntax for JSON files - run: | - error=0 - readarray -d '' json_files < \ - <(find . \( -path ./.git -or -path ./DATA/testing/private \) -prune -false -or -type f -name '*.json' -print0) - for jsonf in "${json_files[@]}"; do - echo "::debug::Checking $jsonf..." - if ! errmsg=$(jq . "$jsonf" 2>&1 >/dev/null); then - error=1 - echo "Invalid JSON syntax found in $jsonf:" >&2 - printf '::error file=%s,title=%s::%s\n' "$jsonf" 'Invalid JSON syntax' "$errmsg" - fi - done - exit "$error" diff --git a/.github/workflows/syntax-checks.yml b/.github/workflows/syntax-checks.yml new file mode 100644 index 000000000..54164102d --- /dev/null +++ b/.github/workflows/syntax-checks.yml @@ -0,0 +1,95 @@ +--- +name: Validate syntax + +'on': + - push + - pull_request + +permissions: {} + +jobs: + json-syntax: + name: JSON + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Validate syntax for JSON files + run: | + error=0 + readarray -d '' json_files < \ + <(find . \( -path ./.git -or -path ./DATA/testing/private \) -prune -false -or -type f -name '*.json' -print0) + for jsonf in "${json_files[@]}"; do + echo "::debug::Checking $jsonf..." + if ! errmsg=$(jq . 
"$jsonf" 2>&1 >/dev/null); then + error=1 + echo "Invalid JSON syntax found in $jsonf:" >&2 + printf '::error file=%s,title=%s::%s\n' "$jsonf" 'Invalid JSON syntax' "$errmsg" + fi + done + exit "$error" + + bash-syntax: + name: Bash + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Validate syntax with bash -n + run: | + error=0 + readarray -d '' files < \ + <(find . -path ./.git -prune -false -or -type f -name '*.sh' -print0) + for bashf in "${files[@]}"; do + echo "::debug::Checking $bashf..." + if ! errmsg=$(bash -n "$bashf" 2>&1 >/dev/null); then + error=1 + echo "Invalid Bash syntax found in $bashf:" >&2 + printf '::error file=%s,title=%s::%s\n' "$bashf" 'Invalid syntax' "$errmsg" + fi + done + exit "$error" + + shellcheck: + name: Shellcheck + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Run Shellcheck to find errors + run: | + error=0 + find . -path ./.git -prune -false -or -type f -name '*.sh' -print0 | + xargs -0 shellcheck -xf json1 -S error -s bash > errors.json || error=$? + # Produce code annotations in GitHub's format. + jq -r '.comments[] | "Error found in \(.file) line \(.line):\n::error file=\(.file),line=\(.line),endLine=\(.endLine),col=\(.column),endColumn=\(.endColumn)::\(.message)"' errors.json + exit "$error" + + pylint: + name: Pylint + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install prerequisites + run: | + sudo apt update -y + sudo apt install -y pylint + + - name: Run Pylint to find errors + run: | + error=0 + find . -path ./.git -prune -false -or -type f -name '*.py' -print0 | + # "import-errors" are shown for valid modules like ROOT, so ignore them. + xargs -0 pylint -E -f json --disable import-error > errors.json || error=$? + # Produce code annotations in GitHub's format. + jq -r '.[] | "Error found in \(.path) line \(.line):\n::error file=\(.path),line=\(.line),endLine=\(.endLine),col=\(.column),endColumn=\(.endColumn),title=Pylint \(.type) \(.symbol)::\(.message)"' errors.json + exit "$error" From fe36cb8f8e27faee1f39c45b3912931d0905a77c Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 16:10:48 +0100 Subject: [PATCH 060/101] Set beam type dependent defaults for TPC_CORR_SCALING --- DATA/common/setenv.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index 74ae5640e..ae7e276f2 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -110,7 +110,6 @@ if [[ -z "${IS_TRIGGERED_DATA:-}" ]]; then export IS_TRIGGERED_DATA=0; fi if [[ -z "${CTF_DIR:-}" ]]; then CTF_DIR=$FILEWORKDIR; fi # Directory where to store CTFs if [[ -z "${CALIB_DIR:-}" ]]; then CALIB_DIR="/dev/null"; fi # Directory where to store output from calibration workflows, /dev/null : skip their writing if [[ -z "${EPN2EOS_METAFILES_DIR:-}" ]]; then EPN2EOS_METAFILES_DIR="/dev/null"; fi # Directory where to store epn2eos files metada, /dev/null : skip their writing -if [[ -z "${TPC_CORR_SCALING:-}" ]]; then export TPC_CORR_SCALING=""; fi # TPC corr.map lumi scaling options, any combination of --lumi-type <0,1,2> --corrmap-lumi-mode <0,1> and TPCCorrMap... 
configurable param if [[ $EPNSYNCMODE == 0 ]]; then if [[ -z "${SHMSIZE:-}" ]]; then export SHMSIZE=$(( 8 << 30 )); fi # Size of shared memory for messages if [[ -z "${NGPUS:-}" ]]; then export NGPUS=1; fi # Number of GPUs to use, data distributed round-robin @@ -164,6 +163,13 @@ DISABLE_ROOT_INPUT="--disable-root-input" : ${DISABLE_DIGIT_CLUSTER_INPUT="--clusters-from-upstream"} # Special detector related settings +if [[ -z "${TPC_CORR_SCALING:-}" ]]; then # TPC corr.map lumi scaling options, any combination of --lumi-type <0,1,2> --corrmap-lumi-mode <0,1> and TPCCorrMap... configurable param + TPC_CORR_SCALING= + if [[ $BEAMTYPE == "pp" ]] || [[ $BEAMTYPE == "PbPb" ]]; then TPC_CORR_SCALING+="--lumi-type 1 TPCCorrMap.lumiInstFactor=2.414"; fi + if [[ $BEAMTYPE == "cosmic" ]]; then TPC_CORR_SCALING=" TPCCorrMap.lumiMean=-1;"; fi # for COSMICS we disable all corrections + export TPC_CORR_SCALING=$TPC_CORR_SCALING +fi + MID_FEEID_MAP="$FILEWORKDIR/mid-feeId_mapper.txt" ITSMFT_STROBES="" From 7313180ebb96573bc6fc08d48f5b5e852cec702e Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 16:11:14 +0100 Subject: [PATCH 061/101] ED_VERTEX_MODE has been removed --- DATA/production/workflow-multiplicities.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DATA/production/workflow-multiplicities.sh b/DATA/production/workflow-multiplicities.sh index 81db5f102..275200d4f 100644 --- a/DATA/production/workflow-multiplicities.sh +++ b/DATA/production/workflow-multiplicities.sh @@ -222,7 +222,7 @@ if [[ -z ${EVE_NTH_EVENT:-} ]]; then EVE_NTH_EVENT=2 elif [[ "$HIGH_RATE_PP" == "1" ]]; then EVE_NTH_EVENT=10 - elif [[ $BEAMTYPE == "pp" && "${ED_VERTEX_MODE:-}" == "1" ]]; then + elif [[ $BEAMTYPE == "pp" ]]; then EVE_NTH_EVENT=$((4 * 250 / $RECO_NUM_NODES_WORKFLOW_CMP)) else # COSMICS / TECHNICALS / ... EVE_NTH_EVENT=1 From 62aaa86dcc059b172221debf96ca8e2d1033e9b2 Mon Sep 17 00:00:00 2001 From: Mattia Faggin Date: Wed, 21 Feb 2024 20:07:14 +0100 Subject: [PATCH 062/101] Change eta cut for TPC tracks in GLO QC. 
(#1487) Co-authored-by: Mattia Faggin --- DATA/production/qc-async/itstpc.json | 2 +- MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/DATA/production/qc-async/itstpc.json b/DATA/production/qc-async/itstpc.json index 5bce54d2e..b294b8cf9 100644 --- a/DATA/production/qc-async/itstpc.json +++ b/DATA/production/qc-async/itstpc.json @@ -49,7 +49,7 @@ "minNITSClustersCut": "0", "maxChi2PerClusterITS": "100000", "minPtTPCCut": "0.1f", - "etaTPCCut": "1.4f", + "etaTPCCut": "0.9f", "minNTPCClustersCut": "60", "minDCACut": "100.f", "minDCACutY": "10.f", diff --git a/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json b/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json index a005c00de..27dff87b8 100644 --- a/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json +++ b/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json @@ -46,7 +46,7 @@ "minNITSClustersCut": "0", "maxChi2PerClusterITS": "100000", "minPtTPCCut": "0.1f", - "etaTPCCut": "1.4f", + "etaTPCCut": "0.9f", "minNTPCClustersCut": "60", "minDCACut": "100.f", "minDCACutY": "10.f", From 30afe4927ae7bf1ad556305aaecd1c1dde9588a4 Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Thu, 22 Feb 2024 15:13:22 +0100 Subject: [PATCH 063/101] Enable IDC and SAC processing by default (#1485) * Enable IDC and SAC processing by default * Add missing fi --- DATA/common/setenv_calib.sh | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index bdb836509..ae07638c6 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -76,15 +76,25 @@ if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then if [[ $CAN_DO_CALIB_TPC_VDRIFTTGL == 1 ]]; then if [[ -z ${CALIB_TPC_VDRIFTTGL+x} ]]; then CALIB_TPC_VDRIFTTGL=1; fi fi - # IDCs + # IDCs (by default we enable it for running the synch. reco on the EPNs) if [[ $CAN_DO_CALIB_TPC_IDC == 1 ]]; then - if [[ -z ${CALIB_TPC_IDC+x} ]] || [[ $CALIB_TPC_IDC == 0 ]]; then - CALIB_TPC_IDC=0; # default is off + if [[ -z ${CALIB_TPC_IDC+x} ]]; then + if [[ $EPNSYNCMODE == 1 ]]; then + CALIB_TPC_IDC=1; + else + CALIB_TPC_IDC=0; + fi fi fi - # SAC + # SAC (by default we enable it for running the synch. 
reco on the EPNs) if [[ $CAN_DO_CALIB_TPC_SAC == 1 ]]; then - if [[ -z ${CALIB_TPC_SAC+x} ]]; then CALIB_TPC_SAC=0; fi # default is off + if [[ -z ${CALIB_TPC_SAC+x} ]]; then + if [[ $EPNSYNCMODE == 1 ]]; then + CALIB_TPC_SAC=1; + else + CALIB_TPC_SAC=0; + fi + fi fi # calibrations for TRD From 6d4924eef63aaf6f4922b2fe2691fffbd0387fb7 Mon Sep 17 00:00:00 2001 From: fmazzasc Date: Wed, 21 Feb 2024 14:55:11 +0100 Subject: [PATCH 064/101] Update hypernuclei gun for pp --- MC/config/PWGLF/pythia8/generator/hypernuclei.gun | 14 +++++++------- MC/run/PWGLF/run_HyperNucleiInjectedGap.sh | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/MC/config/PWGLF/pythia8/generator/hypernuclei.gun b/MC/config/PWGLF/pythia8/generator/hypernuclei.gun index 55af09ba2..af21f0faf 100644 --- a/MC/config/PWGLF/pythia8/generator/hypernuclei.gun +++ b/MC/config/PWGLF/pythia8/generator/hypernuclei.gun @@ -1,8 +1,8 @@ # PDG N ptMin ptMax yMin yMax -1000010020 1 0.2 6 -1 1 -1000010030 1 0.2 6 -1 1 -1000020030 1 0.2 6 -1 1 -1000020040 1 0.2 6 -1 1 -1010010030 1 0.2 6 -1 1 -1010010040 1 0.2 6 -1 1 -1010020040 1 0.2 6 -1 1 +1000010020 1 0.2 10 -1 1 +1000010030 1 0.2 10 -1 1 +1000020030 1 0.2 10 -1 1 +1000020040 1 0.2 10 -1 1 +1010010030 1 0.2 10 -1 1 +1010010040 1 0.2 10 -1 1 +1010020040 1 0.2 10 -1 1 diff --git a/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh b/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh index 027f6d0a0..cbf094547 100644 --- a/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh +++ b/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh @@ -15,7 +15,7 @@ NWORKERS=${NWORKERS:-8} MODULES="--skipModules ZDC" -SIMENGINE=${SIMENGINE:-TGeant3} +SIMENGINE=${SIMENGINE:-TGeant4} NSIGEVENTS=${NSIGEVENTS:-1} NBKGEVENTS=${NBKGEVENTS:-1} NTIMEFRAMES=${NTIMEFRAMES:-1} From 31573f5c3ef96868fe8ce33f1c157a390fa67657 Mon Sep 17 00:00:00 2001 From: fmazzasc Date: Tue, 20 Feb 2024 17:55:49 +0100 Subject: [PATCH 065/101] Update hypernuclei gun for pbpb --- MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini | 2 +- MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun diff --git a/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini b/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini index f548c57cf..feceea039 100644 --- a/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini +++ b/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini @@ -1,6 +1,6 @@ [GeneratorExternal] fileName=${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator_pythia8_longlived_gaptriggered.C -funcName=generateLongLivedGapTriggered({1000010020, 1000010030, 1000020030, 1000020040, 1010010030}, 5, 10) +funcName=generateLongLivedGapTriggered("${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator/hypernuclei.gun", 1) [GeneratorPythia8] config=${O2_ROOT}/share/Generators/egconfig/pythia8_hi.cfg diff --git a/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun new file mode 100644 index 000000000..d15d11554 --- /dev/null +++ b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun @@ -0,0 +1,8 @@ +# PDG N ptMin ptMax yMin yMax +1000010020 4 0.2 10 -1 1 +1000010030 4 0.2 10 -1 1 +1000020030 4 0.2 10 -1 1 +1000020040 4 0.2 10 -1 1 +1010010030 4 0.2 10 -1 1 +1010010040 4 0.2 10 -1 1 +1010020040 4 0.2 10 -1 1 From 763bc731607f5710a9d31b8dafa7608b3c9517f5 Mon Sep 17 00:00:00 2001 From: fmazzasc Date: Tue, 20 Feb 2024 17:57:38 +0100 Subject: [PATCH 066/101] fix --- 
MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini b/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini index feceea039..4acc1d268 100644 --- a/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini +++ b/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini @@ -1,6 +1,6 @@ [GeneratorExternal] fileName=${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator_pythia8_longlived_gaptriggered.C -funcName=generateLongLivedGapTriggered("${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator/hypernuclei.gun", 1) +funcName=generateLongLivedGapTriggered("${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun", 1) [GeneratorPythia8] config=${O2_ROOT}/share/Generators/egconfig/pythia8_hi.cfg From c9f8bc00168c7461ec73326ecd7a2cbe5fe72de5 Mon Sep 17 00:00:00 2001 From: fmazzasc Date: Tue, 20 Feb 2024 18:04:24 +0100 Subject: [PATCH 067/101] Increase number of injected particles --- .../PWGLF/pythia8/generator/hypernuclei_pbpb.gun | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun index d15d11554..cc68fa63b 100644 --- a/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun +++ b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun @@ -1,8 +1,8 @@ # PDG N ptMin ptMax yMin yMax -1000010020 4 0.2 10 -1 1 -1000010030 4 0.2 10 -1 1 -1000020030 4 0.2 10 -1 1 -1000020040 4 0.2 10 -1 1 -1010010030 4 0.2 10 -1 1 -1010010040 4 0.2 10 -1 1 -1010020040 4 0.2 10 -1 1 +1000010020 10 0.2 10 -1 1 +1000010030 10 0.2 10 -1 1 +1000020030 10 0.2 10 -1 1 +1000020040 10 0.2 10 -1 1 +1010010030 10 0.2 10 -1 1 +1010010040 10 0.2 10 -1 1 +1010020040 10 0.2 10 -1 1 From c399748f3c79bc39f06d46604101a1e55914d71a Mon Sep 17 00:00:00 2001 From: fmazzasc Date: Thu, 22 Feb 2024 15:31:02 +0100 Subject: [PATCH 068/101] Inject more particles per event --- .../PWGLF/pythia8/generator/hypernuclei_pbpb.gun | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun index cc68fa63b..3b4a8d274 100644 --- a/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun +++ b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun @@ -1,8 +1,8 @@ # PDG N ptMin ptMax yMin yMax -1000010020 10 0.2 10 -1 1 -1000010030 10 0.2 10 -1 1 -1000020030 10 0.2 10 -1 1 -1000020040 10 0.2 10 -1 1 -1010010030 10 0.2 10 -1 1 -1010010040 10 0.2 10 -1 1 -1010020040 10 0.2 10 -1 1 +1000010020 20 0.2 10 -1 1 +1000010030 20 0.2 10 -1 1 +1000020030 20 0.2 10 -1 1 +1000020040 20 0.2 10 -1 1 +1010010030 20 0.2 10 -1 1 +1010010040 20 0.2 10 -1 1 +1010020040 20 0.2 10 -1 1 From 045895f64490c42027828618818b5e3f5bbf3981 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 23 Feb 2024 14:41:07 +0100 Subject: [PATCH 069/101] Add asyn-label workflows for PRs (#1489) * Add asyn-label workflows for PRs --------- Co-authored-by: Benedikt Volkel --- .github/workflows/async-auto-label.yml | 18 ++++++++++++++++++ .github/workflows/async-list-label.yml | 20 ++++++++++++++++++++ 2 files changed, 38 insertions(+) create mode 100644 .github/workflows/async-auto-label.yml create mode 100644 .github/workflows/async-list-label.yml diff --git a/.github/workflows/async-auto-label.yml b/.github/workflows/async-auto-label.yml new file mode 100644 index 000000000..8ec0659ca --- /dev/null +++ 
b/.github/workflows/async-auto-label.yml @@ -0,0 +1,18 @@ +--- +name: Apply requested async label + +'on': + issue_comment: + types: + - created + - edited + +permissions: + pull-requests: write # to update labels + +jobs: + apply_async_labels: + name: Apply requested async label + uses: alisw/ali-bot/.github/workflows/async-auto-label.yml@master + permissions: + pull-requests: write # to update labels diff --git a/.github/workflows/async-list-label.yml b/.github/workflows/async-list-label.yml new file mode 100644 index 000000000..97144b6b1 --- /dev/null +++ b/.github/workflows/async-list-label.yml @@ -0,0 +1,20 @@ +--- +name: Collect and print async labels + +'on': + pull_request: + types: + - opened + - reopened + branches: + - master + +permissions: + pull-requests: write # to update labels + +jobs: + list_async_labels: + name: Collect and print async labels + uses: alisw/ali-bot/.github/workflows/async-list-label.yml@master + permissions: + pull-requests: write # to update labels From e8511152b4dd58dbc3098b380d17cc468761d17c Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Fri, 23 Feb 2024 15:40:23 +0100 Subject: [PATCH 070/101] Topology generation fixes for staging (#1491) * Only enable CTP lumi scaling if its available * Disable CALIB_TPC_IDC on staging due to lack of resources --- DATA/common/setenv.sh | 2 +- DATA/common/setenv_calib.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index ae7e276f2..61c817a54 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -165,7 +165,7 @@ DISABLE_ROOT_INPUT="--disable-root-input" # Special detector related settings if [[ -z "${TPC_CORR_SCALING:-}" ]]; then # TPC corr.map lumi scaling options, any combination of --lumi-type <0,1,2> --corrmap-lumi-mode <0,1> and TPCCorrMap... configurable param TPC_CORR_SCALING= - if [[ $BEAMTYPE == "pp" ]] || [[ $BEAMTYPE == "PbPb" ]]; then TPC_CORR_SCALING+="--lumi-type 1 TPCCorrMap.lumiInstFactor=2.414"; fi + if ( [[ $BEAMTYPE == "pp" ]] || [[ $BEAMTYPE == "PbPb" ]] ) && has_detector CTP; then TPC_CORR_SCALING+="--lumi-type 1 TPCCorrMap.lumiInstFactor=2.414"; fi if [[ $BEAMTYPE == "cosmic" ]]; then TPC_CORR_SCALING=" TPCCorrMap.lumiMean=-1;"; fi # for COSMICS we disable all corrections export TPC_CORR_SCALING=$TPC_CORR_SCALING fi diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index ae07638c6..95db74f33 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -76,10 +76,10 @@ if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then if [[ $CAN_DO_CALIB_TPC_VDRIFTTGL == 1 ]]; then if [[ -z ${CALIB_TPC_VDRIFTTGL+x} ]]; then CALIB_TPC_VDRIFTTGL=1; fi fi - # IDCs (by default we enable it for running the synch. reco on the EPNs) + # IDCs (by default we enable it for running the synch. 
reco on the EPNs, but not on staging since we have only 1 calibration node available) if [[ $CAN_DO_CALIB_TPC_IDC == 1 ]]; then if [[ -z ${CALIB_TPC_IDC+x} ]]; then - if [[ $EPNSYNCMODE == 1 ]]; then + if [[ $EPNSYNCMODE == 1 ]] && [[ "${GEN_TOPO_DEPLOYMENT_TYPE:-}" != "ALICE_STAGING" ]]; then CALIB_TPC_IDC=1; else CALIB_TPC_IDC=0; From fbbe5cd3002e47430e632840bba3c60db7067516 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 23 Feb 2024 15:43:46 +0100 Subject: [PATCH 071/101] [WF] Take back unneccessary permissions (#1492) Co-authored-by: Benedikt Volkel --- .github/workflows/async-auto-label.yml | 3 +-- .github/workflows/async-list-label.yml | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/async-auto-label.yml b/.github/workflows/async-auto-label.yml index 8ec0659ca..b0a17c7fa 100644 --- a/.github/workflows/async-auto-label.yml +++ b/.github/workflows/async-auto-label.yml @@ -7,8 +7,7 @@ name: Apply requested async label - created - edited -permissions: - pull-requests: write # to update labels +permissions: {} jobs: apply_async_labels: diff --git a/.github/workflows/async-list-label.yml b/.github/workflows/async-list-label.yml index 97144b6b1..6b6f6f57d 100644 --- a/.github/workflows/async-list-label.yml +++ b/.github/workflows/async-list-label.yml @@ -9,8 +9,7 @@ name: Collect and print async labels branches: - master -permissions: - pull-requests: write # to update labels +permissions: {} jobs: list_async_labels: From 77bda58b7e2f0b9988966bb6f74a6d793d3836c6 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 23 Feb 2024 17:37:13 +0100 Subject: [PATCH 072/101] [WF] Raise write permission (#1495) --- .github/workflows/async-list-label.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/async-list-label.yml b/.github/workflows/async-list-label.yml index 6b6f6f57d..97144b6b1 100644 --- a/.github/workflows/async-list-label.yml +++ b/.github/workflows/async-list-label.yml @@ -9,7 +9,8 @@ name: Collect and print async labels branches: - master -permissions: {} +permissions: + pull-requests: write # to update labels jobs: list_async_labels: From 1ce5e589d811175cbd5fccfa56e37bc74362d896 Mon Sep 17 00:00:00 2001 From: shahoian Date: Fri, 23 Feb 2024 17:30:20 +0100 Subject: [PATCH 073/101] Use alio2-cr1-flp199-ib:8083 as DCS-CCDB server, define in setenv.sh --- DATA/common/setenv.sh | 2 ++ DATA/production/calib/hmp-pedestals-processing.sh | 2 +- DATA/production/calib/its-noise-aggregator.sh | 2 +- DATA/production/calib/its-threshold-aggregator.sh | 2 +- DATA/production/calib/mch-badchannel-aggregator.sh | 2 +- DATA/production/calib/mft-noise-aggregator.sh | 2 +- DATA/production/calib/mid-badchannels.sh | 2 +- DATA/testing/detectors/MID/mid-calib-workflow.sh | 2 +- 8 files changed, 9 insertions(+), 7 deletions(-) diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index 61c817a54..18307ebf2 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -110,6 +110,8 @@ if [[ -z "${IS_TRIGGERED_DATA:-}" ]]; then export IS_TRIGGERED_DATA=0; fi if [[ -z "${CTF_DIR:-}" ]]; then CTF_DIR=$FILEWORKDIR; fi # Directory where to store CTFs if [[ -z "${CALIB_DIR:-}" ]]; then CALIB_DIR="/dev/null"; fi # Directory where to store output from calibration workflows, /dev/null : skip their writing if [[ -z "${EPN2EOS_METAFILES_DIR:-}" ]]; then EPN2EOS_METAFILES_DIR="/dev/null"; fi # Directory where to store epn2eos files metada, /dev/null : skip their writing +if [[ -z "${DCSCCDBSERVER:-}" ]]; then export 
DCSCCDBSERVER="http://alio2-cr1-flp199-ib:8083"; fi # server for transvering calibration data to DCS + if [[ $EPNSYNCMODE == 0 ]]; then if [[ -z "${SHMSIZE:-}" ]]; then export SHMSIZE=$(( 8 << 30 )); fi # Size of shared memory for messages if [[ -z "${NGPUS:-}" ]]; then export NGPUS=1; fi # Number of GPUs to use, data distributed round-robin diff --git a/DATA/production/calib/hmp-pedestals-processing.sh b/DATA/production/calib/hmp-pedestals-processing.sh index f7c48d737..b14d38660 100755 --- a/DATA/production/calib/hmp-pedestals-processing.sh +++ b/DATA/production/calib/hmp-pedestals-processing.sh @@ -57,7 +57,7 @@ fi SPEC_PARAM="" if [ $HMP_NODCSCCDB_REC == 'false' ]; then - SPEC_PARAM+="--use-dcsccdb --dcsccdb-uri 'http://alio2-cr1-flp199.cern.ch:8083' --dcsccdb-alivehours 3 " + SPEC_PARAM+="--use-dcsccdb --dcsccdb-uri $DCSCCDBSERVER --dcsccdb-alivehours 3 " fi if [ $HMP_CCDB_REC == 'true' ]; then diff --git a/DATA/production/calib/its-noise-aggregator.sh b/DATA/production/calib/its-noise-aggregator.sh index 97bfcf696..e6aeb04d9 100755 --- a/DATA/production/calib/its-noise-aggregator.sh +++ b/DATA/production/calib/its-noise-aggregator.sh @@ -17,7 +17,7 @@ fi if [[ -z $NTHREADS ]] ; then NTHREADS=1; fi CCDBPATH1="http://o2-ccdb.internal" -CCDBPATH2="http://alio2-cr1-flp199.cern.ch:8083" +CCDBPATH2="$DCSCCDBSERVER" if [[ $RUNTYPE == "SYNTHETIC" || "${GEN_TOPO_DEPLOYMENT_TYPE:-}" == "ALICE_STAGING" || ! -z $ISTEST ]]; then CCDBPATH1="http://ccdb-test.cern.ch:8080" CCDBPATH2="http://ccdb-test.cern.ch:8080" diff --git a/DATA/production/calib/its-threshold-aggregator.sh b/DATA/production/calib/its-threshold-aggregator.sh index cc788d79e..81e1b11c6 100755 --- a/DATA/production/calib/its-threshold-aggregator.sh +++ b/DATA/production/calib/its-threshold-aggregator.sh @@ -11,7 +11,7 @@ PROXY_INSPEC="tunestring:ITS/TSTR;runtype:ITS/RUNT;fittype:ITS/FITT;scantype:ITS CCDBPATH1="" CCDBPATH2="" if [ $RUNTYPE_ITS == "tuning" ] || [ $RUNTYPE_ITS == "digital" ] || [ $RUNTYPE_ITS == "tuningbb" ]; then - CCDBPATH1="http://alio2-cr1-flp199.cern.ch:8083" + CCDBPATH1="$DCSCCDBSERVER" CCDBPATH2="http://o2-ccdb.internal" else CCDBPATH1="http://o2-ccdb.internal" diff --git a/DATA/production/calib/mch-badchannel-aggregator.sh b/DATA/production/calib/mch-badchannel-aggregator.sh index e362e373a..8af400b9d 100755 --- a/DATA/production/calib/mch-badchannel-aggregator.sh +++ b/DATA/production/calib/mch-badchannel-aggregator.sh @@ -20,7 +20,7 @@ BADCHANNEL_CONFIG="${ARGS_ALL_CONFIG};MCHBadChannelCalibratorParam.maxPed=${MCH_ WORKFLOW="o2-dpl-raw-proxy $ARGS_ALL --proxy-name mch-badchannel-input-proxy --dataspec \"$PROXY_INSPEC\" --network-interface ib0 --channel-config \"name=mch-badchannel-input-proxy,method=bind,type=pull,rateLogging=0,transport=zeromq\" | " WORKFLOW+="o2-calibration-mch-badchannel-calib-workflow $ARGS_ALL --configKeyValues \"$BADCHANNEL_CONFIG\" | " WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://o2-ccdb.internal\" --sspec-min 0 --sspec-max 0 | " -WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://ali-calib-dcs.cern.ch:8083\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " +WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"$DCSCCDBSERVER\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " add_QC_from_consul "/o2/components/qc/ANY/any/mch-badchannel" "" WORKFLOW+="o2-dpl-run $ARGS_ALL 
$GLOBALDPLOPT" diff --git a/DATA/production/calib/mft-noise-aggregator.sh b/DATA/production/calib/mft-noise-aggregator.sh index 3fa908025..8c2ef1290 100755 --- a/DATA/production/calib/mft-noise-aggregator.sh +++ b/DATA/production/calib/mft-noise-aggregator.sh @@ -11,7 +11,7 @@ PROXY_INSPEC="A:MFT/DIGITS/0;B:MFT/DIGITSROF/0" WORKFLOW="o2-dpl-raw-proxy $ARGS_ALL --proxy-name mft-noise-input-proxy --dataspec \"$PROXY_INSPEC\" --network-interface ib0 --channel-config \"name=mft-noise-input-proxy,method=bind,type=pull,rateLogging=0,transport=zeromq\" | " WORKFLOW+="o2-calibration-mft-calib-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --useDigits --prob-threshold 1e-5 --send-to-server DCS-CCDB --path-CCDB \"/MFT/Calib/NoiseMap\" --path-DCS \"/MFT/Config/NoiseMap\" | " WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://o2-ccdb.internal\" --sspec-min 0 --sspec-max 0 | " -WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://ali-calib-dcs.cern.ch:8083\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " +WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"$DCSCCDBSERVER\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " WORKFLOW+="o2-dpl-run $ARGS_ALL $GLOBALDPLOPT" if [ $WORKFLOWMODE == "print" ]; then diff --git a/DATA/production/calib/mid-badchannels.sh b/DATA/production/calib/mid-badchannels.sh index 6060b4116..edc18eaec 100755 --- a/DATA/production/calib/mid-badchannels.sh +++ b/DATA/production/calib/mid-badchannels.sh @@ -26,7 +26,7 @@ if [[ -z ${CCDB_POPULATOR_UPLOAD_PATH} ]]; then CCDB_POPULATOR_UPLOAD_PATH_DCS="$CCDB_POPULATOR_UPLOAD_PATH" else CCDB_POPULATOR_UPLOAD_PATH="http://o2-ccdb.internal" - CCDB_POPULATOR_UPLOAD_PATH_DCS="http://alio2-cr1-flp199.cern.ch:8083" + CCDB_POPULATOR_UPLOAD_PATH_DCS="$DCSCCDBSERVER" fi fi if [[ "${GEN_TOPO_VERBOSE:-}" == "1" ]]; then diff --git a/DATA/testing/detectors/MID/mid-calib-workflow.sh b/DATA/testing/detectors/MID/mid-calib-workflow.sh index e846ea94f..a9ad5a7a7 100755 --- a/DATA/testing/detectors/MID/mid-calib-workflow.sh +++ b/DATA/testing/detectors/MID/mid-calib-workflow.sh @@ -7,7 +7,7 @@ WORKFLOW="o2-dpl-raw-proxy $ARGS_ALL --dataspec \"$MID_RAW_PROXY_INSPEC\" --inje WORKFLOW+="o2-mid-raw-to-digits-workflow $ARGS_ALL $MID_RAW_TO_DIGITS_OPTS | " WORKFLOW+="o2-mid-calibration-workflow $ARGS_ALL | " WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://o2-ccdb.internal\" --sspec-min 0 --sspec-max 0 | " -WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://alio2-cr1-flp199.cern.ch:8083\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " +WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"${DCSCCDBSERVER:-http://alio2-cr1-flp199-ib:8083}\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " WORKFLOW+="o2-dpl-run $ARGS_ALL $GLOBALDPLOPT" if [ "$WORKFLOWMODE" == "print" ]; then From 340b4fe7720e0b87b377d1a16910f4adf2f5daa8 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 23 Feb 2024 17:56:21 +0100 Subject: [PATCH 074/101] [WF] Run on pull_request_target (#1496) Co-authored-by: Benedikt Volkel --- .github/workflows/async-list-label.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/.github/workflows/async-list-label.yml b/.github/workflows/async-list-label.yml index 97144b6b1..b53ec6e6e 100644 --- a/.github/workflows/async-list-label.yml +++ b/.github/workflows/async-list-label.yml @@ -2,15 +2,14 @@ name: Collect and print async labels 'on': - pull_request: + pull_request_target: types: - opened - reopened branches: - master -permissions: - pull-requests: write # to update labels +permissions: {} jobs: list_async_labels: From a09b91ff6db85564d63d3f186b0aebdf01956978 Mon Sep 17 00:00:00 2001 From: Francesco Mazzaschi <43742195+fmazzasc@users.noreply.github.com> Date: Tue, 27 Feb 2024 13:44:03 +0100 Subject: [PATCH 075/101] Enable strangeness tracking in strange particle simulations (#1499) --- MC/run/PWGLF/run_HyperNucleiInjectedGap.sh | 2 +- MC/run/PWGLF/run_StrangenessInjected.sh | 2 +- MC/run/PWGLF/run_StrangenessTriggered.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh b/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh index cbf094547..04659f024 100644 --- a/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh +++ b/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh @@ -26,7 +26,7 @@ ENERGY=${ENERGY:-13600} # create workflow ${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py -eCM ${ENERGY} -col ${SYSTEM} -gen external -j ${NWORKERS} -ns ${NSIGEVENTS} -tf ${NTIMEFRAMES} -interactionRate ${INTRATE} -confKey "Diamond.width[0]=0.1;Diamond.width[1]=0.1;Diamond.width[2]=6." -e ${SIMENGINE} ${SEED} -mod "--skipModules ZDC" \ - -ini ${O2DPG_ROOT}/MC/config/PWGLF/ini/GeneratorLFHyperNuclei${SYSTEM}Gap.ini + -ini ${O2DPG_ROOT}/MC/config/PWGLF/ini/GeneratorLFHyperNuclei${SYSTEM}Gap.ini --with-strangeness-tracking # run workflow # allow increased timeframe parallelism with --cpu-limit 32 diff --git a/MC/run/PWGLF/run_StrangenessInjected.sh b/MC/run/PWGLF/run_StrangenessInjected.sh index 9c07c9271..e8cb17068 100755 --- a/MC/run/PWGLF/run_StrangenessInjected.sh +++ b/MC/run/PWGLF/run_StrangenessInjected.sh @@ -35,7 +35,7 @@ O2_SIM_WORKFLOW=${O2_SIM_WORKFLOW:-"${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py"} $O2_SIM_WORKFLOW -eCM ${ENERGY} -col ${SYSTEM} -gen external \ -j ${NWORKERS} \ -ns ${NSIGEVENTS} -tf ${NTIMEFRAMES} -interactionRate ${INTRATE} \ - -confKey "Diamond.width[2]=6." \ + -confKey "Diamond.width[2]=6." --with-strangeness-tracking \ ${SEED} \ -procBkg "inel" -colBkg $SYSTEM --embedding -nb ${NBKGEVENTS} -genBkg pythia8 \ -e ${SIMENGINE} \ diff --git a/MC/run/PWGLF/run_StrangenessTriggered.sh b/MC/run/PWGLF/run_StrangenessTriggered.sh index 412edf995..39ff2ff1e 100755 --- a/MC/run/PWGLF/run_StrangenessTriggered.sh +++ b/MC/run/PWGLF/run_StrangenessTriggered.sh @@ -34,7 +34,7 @@ O2_SIM_WORKFLOW=${O2_SIM_WORKFLOW:-"${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py"} $O2_SIM_WORKFLOW -eCM ${ENERGY} -col ${SYSTEM} -gen external \ -j ${NWORKERS} \ -ns ${NSIGEVENTS} -tf ${NTIMEFRAMES} -interactionRate ${INTRATE} \ - -confKey "Diamond.width[2]=6." \ + -confKey "Diamond.width[2]=6." 
--with-strangeness-tracking \ ${SEED} \ -e ${SIMENGINE} \ -ini $CFGINIFILE From f568fd496d33b39d46bc6f6582f56269f946f2bd Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 27 Feb 2024 14:09:19 +0100 Subject: [PATCH 076/101] [SimCI] Make mem limit configurable in WF tests (#1502) --- test/run_workflow_tests.sh | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/test/run_workflow_tests.sh b/test/run_workflow_tests.sh index e96b07141..15746b3ce 100755 --- a/test/run_workflow_tests.sh +++ b/test/run_workflow_tests.sh @@ -74,10 +74,11 @@ test_single_wf() if [[ "${ret_this}" != "0" ]] ; then echo "[FATAL]: O2DPG_TEST Workflow creation failed" >> ${LOG_FILE_WF} elif [[ "${execute}" != "" ]] ; then - ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 -tt aod >> ${LOG_FILE_WF} 2>&1 + local memlimit=${O2DPG_TEST_WORKFLOW_MEMLIMIT:+--mem-limit O2DPG_TEST_WORKFLOW_MEMLIMIT} + ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 -tt aod ${memlimit} >> ${LOG_FILE_WF} 2>&1 ret_this=${?} - [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels QC >> ${LOG_FILE_WF} 2>&1 ; ret_this_qc=${?} ; } - [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels Analysis >> ${LOG_FILE_WF} 2>&1 ; ret_this_analysis=${?} ; } + [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels QC ${memlimit} >> ${LOG_FILE_WF} 2>&1 ; ret_this_qc=${?} ; } + [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels Analysis ${memlimit} >> ${LOG_FILE_WF} 2>&1 ; ret_this_analysis=${?} ; } ret_this=$((ret_this + ret_this_qc + ret_this_analysis)) [[ "${ret_this}" != "0" ]] && echo "[FATAL]: O2DPG_TEST Workflow execution failed" >> ${LOG_FILE_WF} fi @@ -173,6 +174,8 @@ print_usage() echo " If also not set, this will be set to HEAD. However, if there are unstaged" echo " changes, it will left blank." 
echo + echo " O2DPG_TEST_WORKFLOW_MEMLIMIT : The memory limit that is passed to the workflow runner in case a workflow is executed (optional)" + echo } while [ "$1" != "" ] ; do From 8c1886416bdef6ea80d0f0cbe976c6211e34e72a Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 27 Feb 2024 15:47:12 +0100 Subject: [PATCH 077/101] [SimCI] Make a variable (#1503) --- test/run_workflow_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/run_workflow_tests.sh b/test/run_workflow_tests.sh index 15746b3ce..2acde102e 100755 --- a/test/run_workflow_tests.sh +++ b/test/run_workflow_tests.sh @@ -74,7 +74,7 @@ test_single_wf() if [[ "${ret_this}" != "0" ]] ; then echo "[FATAL]: O2DPG_TEST Workflow creation failed" >> ${LOG_FILE_WF} elif [[ "${execute}" != "" ]] ; then - local memlimit=${O2DPG_TEST_WORKFLOW_MEMLIMIT:+--mem-limit O2DPG_TEST_WORKFLOW_MEMLIMIT} + local memlimit=${O2DPG_TEST_WORKFLOW_MEMLIMIT:+--mem-limit ${O2DPG_TEST_WORKFLOW_MEMLIMIT}} ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 -tt aod ${memlimit} >> ${LOG_FILE_WF} 2>&1 ret_this=${?} [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels QC ${memlimit} >> ${LOG_FILE_WF} 2>&1 ; ret_this_qc=${?} ; } From dfc147c8f73c02a975003baa196b00fbdca153b1 Mon Sep 17 00:00:00 2001 From: David Rohr Date: Thu, 15 Feb 2024 18:39:39 +0100 Subject: [PATCH 078/101] Revert "Temporary workaround, to be reverted once O2 PR 12412 is in all async tags" This reverts commit fd619fe62c5c4f622492992a367da6945f1d5186. --- DATA/production/configurations/asyncReco/async_pass.sh | 8 -------- 1 file changed, 8 deletions(-) diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index 9d1f49025..a49a941b0 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -371,8 +371,6 @@ if [[ -n "$ALIEN_JDL_USEGPUS" && $ALIEN_JDL_USEGPUS != 0 ]] ; then if [[ $ALIEN_JDL_UNOPTIMIZEDGPUSETTINGS != 1 ]]; then export OPTIMIZED_PARALLEL_ASYNC=pp_1gpu # sets the multiplicities to optimized defaults for this configuration (1 job with 1 gpu on EPNs) export OPTIMIZED_PARALLEL_ASYNC_AUTO_SHM_LIMIT=1 - export TIMEFRAME_RATE_LIMIT=8 # WORKAROUND: Needed until O2 PR 12412 is in all async tags - export SHMSIZE=30000000000 # WORKAROUND: Needed until O2 PR 12412 is in all async tags else # forcing multiplicities to be 1 export MULTIPLICITY_PROCESS_tof_matcher=1 @@ -392,13 +390,9 @@ if [[ -n "$ALIEN_JDL_USEGPUS" && $ALIEN_JDL_USEGPUS != 0 ]] ; then if [[ $BEAMTYPE == "pp" ]]; then export OPTIMIZED_PARALLEL_ASYNC=pp_4gpu # sets the multiplicities to optimized defaults for this configuration (1 Numa, pp) export OPTIMIZED_PARALLEL_ASYNC_AUTO_SHM_LIMIT=1 - export TIMEFRAME_RATE_LIMIT=45 # WORKAROUND: Needed until O2 PR 12412 is in all async tags - export SHMSIZE=100000000000 # WORKAROUND: Needed until O2 PR 12412 is in all async tags else # PbPb export OPTIMIZED_PARALLEL_ASYNC=PbPb_4gpu # sets the multiplicities to optimized defaults for this configuration (1 Numa, PbPb) export OPTIMIZED_PARALLEL_ASYNC_AUTO_SHM_LIMIT=1 - export TIMEFRAME_RATE_LIMIT=30 # WORKAROUND: Needed until O2 PR 12412 is in all async tags - export SHMSIZE=100000000000 # WORKAROUND: Needed until O2 PR 12412 is in all async tags fi fi fi @@ -428,8 +422,6 @@ else else export OPTIMIZED_PARALLEL_ASYNC=pp_64cpu # to use EPNs with full NUMA domain but without 
GPUs export OPTIMIZED_PARALLEL_ASYNC_AUTO_SHM_LIMIT=1 - export TIMEFRAME_RATE_LIMIT=32 # WORKAROUND: Needed until O2 PR 12412 is in all async tags - export SHMSIZE=90000000000 # WORKAROUND: Needed until O2 PR 12412 is in all async tags fi fi fi From ffe6861adc210e9828278702dd5b372342224685 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Mon, 26 Feb 2024 14:56:46 +0100 Subject: [PATCH 079/101] Do not ask for geometry in GLO ITS-TPC and TOF matching QC --- DATA/production/qc-async/itstpc.json | 2 +- DATA/production/qc-async/itstpctof.json | 2 +- DATA/production/qc-async/itstpctofwtrd.json | 4 ++-- DATA/production/qc-sync/glo-itstpc-mtch-qcmn-epn.json | 2 +- DATA/production/qc-sync/itstpctof.json | 2 +- DATA/production/qc-sync/itstpctrdtof.json | 2 +- MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json | 4 ++-- MC/config/QC/json/tofMatchedTracks_AllTypes_direct_MC.json | 2 +- .../QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json | 2 +- 9 files changed, 11 insertions(+), 11 deletions(-) diff --git a/DATA/production/qc-async/itstpc.json b/DATA/production/qc-async/itstpc.json index b294b8cf9..59607bc4d 100644 --- a/DATA/production/qc-async/itstpc.json +++ b/DATA/production/qc-async/itstpc.json @@ -58,7 +58,7 @@ "etaCut": "1e10f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/DATA/production/qc-async/itstpctof.json b/DATA/production/qc-async/itstpctof.json index a843da3ea..c5c8697af 100644 --- a/DATA/production/qc-async/itstpctof.json +++ b/DATA/production/qc-async/itstpctof.json @@ -46,7 +46,7 @@ "minDCACutY" : "10.f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/DATA/production/qc-async/itstpctofwtrd.json b/DATA/production/qc-async/itstpctofwtrd.json index 55094c2ad..2c397df57 100644 --- a/DATA/production/qc-async/itstpctofwtrd.json +++ b/DATA/production/qc-async/itstpctofwtrd.json @@ -46,7 +46,7 @@ "minDCACutY" : "10.f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", @@ -55,7 +55,7 @@ "askOnceAllButField": "true", "needPropagatorD": "false" }, - "saveObjectsToFile" : "TOFmatchedITSTPCTOF_TPCTOF.root", + "saveObjectsToFile" : "TOFmatchedITSTPCTOF_TPCTOF_wTRD.root", "" : "For debugging, path to the file where to save. If empty or missing it won't save." 
} } diff --git a/DATA/production/qc-sync/glo-itstpc-mtch-qcmn-epn.json b/DATA/production/qc-sync/glo-itstpc-mtch-qcmn-epn.json index da8964a41..53a1dca39 100644 --- a/DATA/production/qc-sync/glo-itstpc-mtch-qcmn-epn.json +++ b/DATA/production/qc-sync/glo-itstpc-mtch-qcmn-epn.json @@ -44,7 +44,7 @@ "minDCACutY" : "10.f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/DATA/production/qc-sync/itstpctof.json b/DATA/production/qc-sync/itstpctof.json index dcc986436..fefe85c87 100644 --- a/DATA/production/qc-sync/itstpctof.json +++ b/DATA/production/qc-sync/itstpctof.json @@ -53,7 +53,7 @@ "minDCACutY" : "10.f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/DATA/production/qc-sync/itstpctrdtof.json b/DATA/production/qc-sync/itstpctrdtof.json index 2fb6363ba..087449715 100644 --- a/DATA/production/qc-sync/itstpctrdtof.json +++ b/DATA/production/qc-sync/itstpctrdtof.json @@ -53,7 +53,7 @@ "minDCACutY" : "10.f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json b/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json index 27dff87b8..5aba15f8a 100644 --- a/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json +++ b/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json @@ -56,7 +56,7 @@ "isMC": "true" }, "grpGeomRequest": { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", @@ -72,4 +72,4 @@ } }, "dataSamplingPolicies": [] -} \ No newline at end of file +} diff --git a/MC/config/QC/json/tofMatchedTracks_AllTypes_direct_MC.json b/MC/config/QC/json/tofMatchedTracks_AllTypes_direct_MC.json index 4d5acc87a..7ae8f16e6 100644 --- a/MC/config/QC/json/tofMatchedTracks_AllTypes_direct_MC.json +++ b/MC/config/QC/json/tofMatchedTracks_AllTypes_direct_MC.json @@ -49,7 +49,7 @@ "minDCACutY": "10.f" }, "grpGeomRequest": { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/MC/config/QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json b/MC/config/QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json index ef0d21311..95f6f9008 100644 --- a/MC/config/QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json +++ b/MC/config/QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json @@ -50,7 +50,7 @@ "minDCACutY": "10.f" }, "grpGeomRequest": { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", From 95c2b213b346569b6c93f7cde096cfd8dd5ea6ff Mon Sep 17 00:00:00 2001 From: shahoian Date: Wed, 28 Feb 2024 17:48:27 +0100 Subject: [PATCH 080/101] Move RECO_NUM_NODES_WORKFLOW_CMP definition to the beginning --- DATA/production/workflow-multiplicities.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/DATA/production/workflow-multiplicities.sh b/DATA/production/workflow-multiplicities.sh index 275200d4f..dd04d93ba 100644 --- a/DATA/production/workflow-multiplicities.sh +++ b/DATA/production/workflow-multiplicities.sh @@ -30,6 +30,8 @@ if [[ $SYNCMODE == 1 ]]; then NTRDTRKTHREADS=1; else NTRDTRKTHREADS=; fi : ${NGPURECOTHREADS:=-1} # -1 = auto-detect 
+RECO_NUM_NODES_WORKFLOW_CMP=$((($RECO_NUM_NODES_WORKFLOW > 15 ? ($RECO_NUM_NODES_WORKFLOW < 230 ? $RECO_NUM_NODES_WORKFLOW : 230) : 15) * ($NUMAGPUIDS != 0 ? 2 : 1))) # Limit the lower scaling factor, multiply by 2 if we have 2 NUMA domains + # --------------------------------------------------------------------------------------------------------------------- # Process multiplicities @@ -157,7 +159,6 @@ elif [[ $EPNPIPELINES != 0 ]]; then NTRDTRKTHREADS=2 ITSTRK_THREADS=2 ITSTPC_THREADS=2 - RECO_NUM_NODES_WORKFLOW_CMP=$((($RECO_NUM_NODES_WORKFLOW > 15 ? ($RECO_NUM_NODES_WORKFLOW < 230 ? $RECO_NUM_NODES_WORKFLOW : 230) : 15) * ($NUMAGPUIDS != 0 ? 2 : 1))) # Limit the lower scaling factor, multiply by 2 if we have 2 NUMA domains # Tuned multiplicities for sync pp / Pb-Pb processing if [[ $BEAMTYPE == "pp" ]]; then N_ITSRAWDEC=$(math_max $((6 * $EPNPIPELINES * $NGPUS / 4)) 1) From d47944257ead124fdd7b33b1430c8a8e5956da59 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Wed, 28 Feb 2024 23:48:07 +0100 Subject: [PATCH 081/101] To allow to set the trackQC sampling from the JDL, or have a subsample with full sampling --- .../configurations/asyncReco/async_pass.sh | 2 +- .../configurations/asyncReco/setenv_extra.sh | 24 ++++++++++++++++++- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index a49a941b0..ce6d3ce5e 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -20,7 +20,7 @@ run_AOD_merging() { timeStartFullProcessing=`date +%s` # to skip positional arg parsing before the randomizing part. -inputarg="${1}" +export inputarg="${1}" if [[ "${1##*.}" == "root" ]]; then #echo ${1##*.} diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index 4f9992a67..c1d82bbca 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -622,7 +622,29 @@ fi # ad-hoc settings for AOD echo ALIEN_JDL_LPMPRODUCTIONTAG = $ALIEN_JDL_LPMPRODUCTIONTAG echo ALIEN_JDL_LPMPASSNAME = $ALIEN_JDL_LPMPASSNAME -export ARGS_EXTRA_PROCESS_o2_aod_producer_workflow="$ARGS_EXTRA_PROCESS_o2_aod_producer_workflow --aod-writer-maxfilesize $AOD_FILE_SIZE --lpmp-prod-tag $ALIEN_JDL_LPMPRODUCTIONTAG --reco-pass $ALIEN_JDL_LPMPASSNAME" +# Track QC table sampling +if [[ -n $ALIEN_JDL_TRACKQCFRACTION ]]; then + TRACKQC_FRACTION=$ALIEN_JDL_TRACKQCFRACTION +else + if [[ $ALIEN_JDL_ENABLEPERMILFULLTRACKQC == "1" ]]; then + PERMIL_FULLTRACKQC=${ALIEN_JDL_PERMILFULLTRACKQC:-100} + INVERSE_PERMIL_FULLTRACKQC=$((1000/PERMIL_FULLTRACKQC)) + if [[ -f wn.xml ]]; then + HASHCODE=`grep alien:// wn.xml | tr ' ' '\n' | grep ^lfn | cut -d\" -f2 | head -1 | cksum | cut -d ' ' -f 1` + else + HASHCODE=`echo "${inputarg}" | cksum | cut -d ' ' -f 1` + fi + if [[ "$((HASHCODE%INVERSE_PERMIL_FULLTRACKQC))" -eq "0" ]]; then + TRACKQC_FRACTION=1 + else + TRACKQC_FRACTION=0.1 + fi + else + TRACKQC_FRACTION=0.1 + fi +fi +echo TRACKQC_FRACTION = $TRACKQC_FRACTION +export ARGS_EXTRA_PROCESS_o2_aod_producer_workflow="$ARGS_EXTRA_PROCESS_o2_aod_producer_workflow --aod-writer-maxfilesize $AOD_FILE_SIZE --lpmp-prod-tag $ALIEN_JDL_LPMPRODUCTIONTAG --reco-pass $ALIEN_JDL_LPMPASSNAME --trackqc-fraction $TRACKQC_FRACTION" if [[ $PERIOD == "LHC22c" ]] || [[ $PERIOD == "LHC22d" ]] || [[ $PERIOD == "LHC22e" ]] || [[ 
$PERIOD == "JUN" ]] || [[ $PERIOD == "LHC22f" ]] || [[ $PERIOD == "LHC22m" ]] || [[ "$RUNNUMBER" == @(526463|526465|526466|526467|526468|526486|526505|526508|526510|526512|526525|526526|526528|526534|526559|526596|526606|526612|526638|526639|526641|526643|526647|526649|526689|526712|526713|526714|526715|526716|526719|526720|526776|526886|526926|526927|526928|526929|526934|526935|526937|526938|526963|526964|526966|526967|526968|527015|527016|527028|527031|527033|527034|527038|527039|527041|527057|527076|527108|527109|527228|527237|527259|527260|527261|527262|527345|527347|527349|527446|527518|527523|527734) ]] ; then export ARGS_EXTRA_PROCESS_o2_aod_producer_workflow="$ARGS_EXTRA_PROCESS_o2_aod_producer_workflow --ctpreadout-create 1" fi From 2b9b08709777b7f85bdd1282eb8c06519c361c34 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Thu, 29 Feb 2024 09:47:08 +0100 Subject: [PATCH 082/101] Do not split the metrics, as it is too slow, unless requested --- .../configurations/asyncReco/async_pass.sh | 32 ++++++++++--------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index ce6d3ce5e..bd6e9ce89 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -610,21 +610,23 @@ else fi # now extract all performance metrics -IFS=$'\n' -timeStart=`date +%s` -for perfMetricsFiles in performanceMetrics.json performanceMetrics_1.json performanceMetrics_2.json performanceMetrics_3.json ; do - suffix=`echo $perfMetricsFiles | sed 's/performanceMetrics\(.*\).json/\1/'` - if [[ -f "performanceMetrics.json" ]]; then - for workflow in `grep ': {' $perfMetricsFiles`; do - strippedWorkflow=`echo $workflow | cut -d\" -f2` - cat $perfMetricsFiles | jq '.'\"${strippedWorkflow}\"'' > ${strippedWorkflow}_metrics${suffix}.json - done - fi -done -timeEnd=`date +%s` -timeUsed=$(( $timeUsed+$timeEnd-$timeStart )) -delta=$(( $timeEnd-$timeStart )) -echo "Time spent in splitting the metrics files = $delta s" +if [[ $ALIEN_JDL_EXTRACTMETRICS == "1" ]]; then + IFS=$'\n' + timeStart=`date +%s` + for perfMetricsFiles in performanceMetrics.json performanceMetrics_1.json performanceMetrics_2.json performanceMetrics_3.json ; do + suffix=`echo $perfMetricsFiles | sed 's/performanceMetrics\(.*\).json/\1/'` + if [[ -f "performanceMetrics.json" ]]; then + for workflow in `grep ': {' $perfMetricsFiles`; do + strippedWorkflow=`echo $workflow | cut -d\" -f2` + cat $perfMetricsFiles | jq '.'\"${strippedWorkflow}\"'' > ${strippedWorkflow}_metrics${suffix}.json + done + fi + done + timeEnd=`date +%s` + timeUsed=$(( $timeUsed+$timeEnd-$timeStart )) + delta=$(( $timeEnd-$timeStart )) + echo "Time spent in splitting the metrics files = $delta s" +fi if [[ $ALIEN_JDL_AODOFF != 1 ]]; then # flag to possibly enable Analysis QC From b04143af8dbefc4befc2853eba74a2ce63e21846 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Wed, 28 Feb 2024 22:20:45 +0100 Subject: [PATCH 083/101] Running extraction of TPC time series in anchored MC --- MC/run/ANCHOR/anchorMC.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index 2bab586de..c30597eb6 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -235,6 +235,11 @@ echo "Ready to start main workflow" ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_O2DPGWORKFLOWTARGET:-aod} --cpu-limit 
${ALIEN_JDL_CPULIMIT:-8} MCRC=$? # <--- we'll report back this code +if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then + echo "Running TPC time series" + ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt tpctimes +fi + [[ ! -z "${DISABLE_QC}" ]] && echo "INFO: QC is disabled, skip it." if [[ -z "${DISABLE_QC}" && "${MCRC}" = "0" && "${remainingargs}" == *"--include-local-qc"* ]] ; then From b9cb67a0845587b3d34bb6d320711dc0478c4022 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Thu, 29 Feb 2024 17:49:32 +0100 Subject: [PATCH 084/101] [Anchor] Add some more help messages (#1511) * help messages * set ALIEN_JDL_ADDTIMESERIESINMC to a value at the beginning --- MC/run/ANCHOR/anchorMC.sh | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index c30597eb6..13512fea7 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -41,10 +41,13 @@ print_help() echo "PRODSPLIT." echo echo "Optional are:" - echo "NWORKERS," - echo "ALIEN_JDL_CPULIMIT or CPULIMIT," - echo "ALIEN_JDL_SIMENGINE or SIMENGINE." - echo "DISABLE_QC (set this to disable QC, e.g. DISABLE_QC=1)" + echo "ALIEN_JDL_CPULIMIT or CPULIMIT, set the CPU limit of the workflow runner, default: 8," + echo "NWORKERS, set the number of workers during detector transport, default: 8," + echo "ALIEN_JDL_SIMENGINE or SIMENGINE, choose the transport engine, default: TGeant4," + echo "ALIEN_JDL_WORKFLOWDETECTORS, set detectors to be taken into account, default: ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP," + echo "ALIEN_JDL_ANCHOR_SIM_OPTIONS, additional options that are passed to the workflow creation, default: -gen pythia8," + echo "ALIEN_JDL_ADDTIMESERIESINMC, run TPC time series. Default: 1, switch off by setting to 0," + echo "DISABLE_QC, set this to disable QC, e.g. to 1" } # Prevent the script from being soured to omit unexpected surprises when exit is used @@ -93,6 +96,8 @@ export ALIEN_JDL_LPMPRODUCTIONTAG=${ALIEN_JDL_LPMPRODUCTIONTAG:-${PRODUCTIONTAG} export ALIEN_JDL_LPMANCHORRUN=${ALIEN_JDL_LPMANCHORRUN:-${ANCHORRUN}} export ALIEN_JDL_LPMANCHORPRODUCTION=${ALIEN_JDL_LPMANCHORPRODUCTION:-${ANCHORPRODUCTION}} export ALIEN_JDL_LPMANCHORYEAR=${ALIEN_JDL_LPMANCHORYEAR:-${ANCHORYEAR}} +# decide whether to run TPC time series; on by default, switched off by setting to 0 +export ALIEN_JDL_ADDTIMESERIESINMC=${ALIEN_JDL_ADDTIMESERIESINMC:-1} # cache the production tag, will be set to a special anchor tag; reset later in fact ALIEN_JDL_LPMPRODUCTIONTAG_KEEP=$ALIEN_JDL_LPMPRODUCTIONTAG @@ -236,6 +241,7 @@ ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_ MCRC=$? # <--- we'll report back this code if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then + # Default value is 1 so this is run by default. 
echo "Running TPC time series" ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt tpctimes fi From b818d3447fc3f65c4b4cf8f33088b9232862887c Mon Sep 17 00:00:00 2001 From: shahoian Date: Thu, 29 Feb 2024 23:39:42 +0100 Subject: [PATCH 085/101] Add ITS/MFT time-dependent DeadMaps calibration --- DATA/common/setenv_calib.sh | 22 ++++++++++++++++++- .../configurations/asyncReco/setenv_extra.sh | 2 ++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index 95db74f33..a2da3ec46 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -13,6 +13,8 @@ SOURCE_GUARD_SETENV_CALIB=1 # define the conditions for each calibration if has_detector_calib ITS && has_detectors_reco ITS && has_detector_matching PRIMVTX && [[ ! -z "$VERTEXING_SOURCES" ]]; then CAN_DO_CALIB_PRIMVTX_MEANVTX=1; else CAN_DO_CALIB_PRIMVTX_MEANVTX=0; fi +if has_detector_calib ITS ; then CAN_DO_CALIB_ITS_DEADMAP_TIME=1; else CAN_DO_CALIB_ITS_DEADMAP_TIME=0; fi +if has_detector_calib MFT ; then CAN_DO_CALIB_MFT_DEADMAP_TIME=1; else CAN_DO_CALIB_MFT_DEADMAP_TIME=0; fi if has_detector_calib TOF && has_detector_reco TOF; then CAN_DO_CALIB_TOF_DIAGNOSTICS=1; CAN_DO_CALIB_TOF_INTEGRATEDCURR=1; else CAN_DO_CALIB_TOF_DIAGNOSTICS=0; CAN_DO_CALIB_TOF_INTEGRATEDCURR=0; fi if has_detector_calib TOF && has_detector_reco TOF && ( ( has_detectors_reco ITS TPC && has_detector_matching ITSTPCTOF ) || ( has_detectors_reco ITS TPC TRD && has_detector_matching ITSTPCTRDTOF ) ); then CAN_DO_CALIB_TOF_LHCPHASE=1; CAN_DO_CALIB_TOF_CHANNELOFFSETS=1; else CAN_DO_CALIB_TOF_LHCPHASE=0; CAN_DO_CALIB_TOF_CHANNELOFFSETS=0; fi if has_detector_calib TPC && has_detectors ITS TPC TOF TRD && has_detector_matching ITSTPCTRDTOF; then CAN_DO_CALIB_TPC_SCDCALIB=1; else CAN_DO_CALIB_TPC_SCDCALIB=0; fi @@ -48,7 +50,17 @@ if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then if [[ $CAN_DO_CALIB_PRIMVTX_MEANVTX == 1 ]]; then if [[ -z ${CALIB_PRIMVTX_MEANVTX+x} ]]; then CALIB_PRIMVTX_MEANVTX=1; fi fi - + + # calibrations for ITS + if [[ $CAN_DO_CALIB_ITS_DEADMAP_TIME == 1 ]]; then + if [[ -z ${CALIB_ITS_DEADMAP_TIME+x} ]]; then CALIB_ITS_DEADMAP_TIME=1; fi + fi + + # calibrations for MFT + if [[ $CAN_DO_CALIB_MFT_DEADMAP_TIME == 1 ]]; then + if [[ -z ${CALIB_MFT_DEADMAP_TIME+x} ]]; then CALIB_MFT_DEADMAP_TIME=1; fi + fi + # calibrations for TOF if [[ $CAN_DO_CALIB_TOF_DIAGNOSTICS == 1 ]]; then if [[ -z ${CALIB_TOF_DIAGNOSTICS+x} ]]; then CALIB_TOF_DIAGNOSTICS=1; fi @@ -185,6 +197,8 @@ fi ( [[ -z ${CALIB_PHS_L1PHASE:-} ]] || [[ $CAN_DO_CALIB_PHS_L1PHASE == 0 ]] ) && CALIB_PHS_L1PHASE=0 ( [[ -z ${CALIB_CPV_GAIN:-} ]] || [[ $CAN_DO_CALIB_CPV_GAIN == 0 ]] ) && CALIB_CPV_GAIN=0 ( [[ -z ${CALIB_ZDC_TDC:-} ]] || [[ $CAN_DO_CALIB_ZDC_TDC == 0 ]] ) && CALIB_ZDC_TDC=0 +( [[ -z ${CALIB_ITS_DEADMAP_TIME:-} ]] || [[ $CAN_DO_CALIB_ITS_DEADMAP_TIME == 0 ]] ) && CALIB_ITS_DEADMAP_TIME=0 +( [[ -z ${CALIB_MFT_DEADMAP_TIME:-} ]] || [[ $CAN_DO_CALIB_MFT_DEADMAP_TIME == 0 ]] ) && CALIB_MFT_DEADMAP_TIME=0 # for async: ( [[ -z ${CALIB_EMC_ASYNC_RECALIB:-} ]] || [[ $CAN_DO_CALIB_EMC_ASYNC_RECALIB == 0 ]] ) && CALIB_EMC_ASYNC_RECALIB=0 ( [[ -z ${CALIB_ASYNC_EXTRACTTPCCURRENTS:-} ]] || [[ $CAN_DO_CALIB_ASYNC_EXTRACTTPCCURRENTS == 0 ]] ) && CALIB_ASYNC_EXTRACTTPCCURRENTS=0 @@ -228,6 +242,12 @@ if [[ -z ${CALIBDATASPEC_BARREL_TF:-} ]]; then # prim vtx if [[ $CALIB_PRIMVTX_MEANVTX == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "pvtx:GLO/PVTX/0"; fi + # ITS + if [[ 
$CALIB_ITS_DEADMAP_TIME == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "itsChipStatus:ITS/CHIPSSTATUS/0"; fi + + # MFT + if [[ $CALIB_MFT_DEADMAP_TIME == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "mftChipStatus:MFT/CHIPSSTATUS/0"; fi + # TOF if [[ $CALIB_TOF_LHCPHASE == 1 ]] || [[ $CALIB_TOF_CHANNELOFFSETS == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "calibTOF:TOF/CALIBDATA/0"; fi if [[ $CALIB_TOF_DIAGNOSTICS == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "diagWords:TOF/DIAFREQ/0"; fi diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index c1d82bbca..3a26bc4d8 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -557,6 +557,8 @@ if [[ $ADD_CALIB == "1" ]]; then export CALIB_FV0_INTEGRATEDCURR=0 export CALIB_FDD_INTEGRATEDCURR=0 export CALIB_TOF_INTEGRATEDCURR=0 + export CALIB_ITS_DEADMAP_TIME=0 + export CALIB_MFT_DEADMAP_TIME=0 if [[ $DO_TPC_RESIDUAL_EXTRACTION == "1" ]]; then export CALIB_TPC_SCDCALIB=1 export CALIB_TPC_SCDCALIB_SENDTRKDATA=1 From 5510ade973ccd2092cc18554ea075f902b21129e Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 1 Mar 2024 12:20:05 +0100 Subject: [PATCH 086/101] [AnalysisQC] Make common args better adjustable (#1514) Co-authored-by: Benedikt Volkel --- MC/analysis_testing/o2dpg_analysis_test_utils.py | 9 ++++++++- MC/analysis_testing/o2dpg_analysis_test_workflow.py | 2 +- MC/config/analysis_testing/json/analyses_config.json | 3 +++ 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/MC/analysis_testing/o2dpg_analysis_test_utils.py b/MC/analysis_testing/o2dpg_analysis_test_utils.py index 1a3901fe6..ee896f12f 100755 --- a/MC/analysis_testing/o2dpg_analysis_test_utils.py +++ b/MC/analysis_testing/o2dpg_analysis_test_utils.py @@ -69,7 +69,7 @@ def full_ana_name(raw_ana_name): return f"{ANALYSIS_LABEL}_{raw_ana_name}" -def get_common_args_as_string(analysis_name, all_common_args): +def get_common_args_as_string(ana, all_common_args): """ all_common_args is of the form [-shm-segment-size , -readers , ...] 
@@ -88,6 +88,11 @@ def make_args_string(args_map_in): "readers": 1, "aod-memory-rate-limit": 500000000} + # get common args from analysis configuration and add to args_map + common_args_from_config = ana.get("common_args", {}) + for key, value in common_args_from_config.items(): + args_map[key] = value + # arguments dedicated for this analysis args_map_overwrite = {} @@ -98,6 +103,8 @@ def make_args_string(args_map_in): print("ERROR: Cannot digest common args.") return None + analysis_name = ana["name"] + for i in range(0, len(all_common_args), 2): tokens = all_common_args[i].split("-") key = "-".join(tokens[1:]) diff --git a/MC/analysis_testing/o2dpg_analysis_test_workflow.py b/MC/analysis_testing/o2dpg_analysis_test_workflow.py index 75058219e..c50ed6999 100755 --- a/MC/analysis_testing/o2dpg_analysis_test_workflow.py +++ b/MC/analysis_testing/o2dpg_analysis_test_workflow.py @@ -251,7 +251,7 @@ def add_analysis_tasks(workflow, input_aod="./AO2D.root", output_dir="./Analysis continue print(f"INFO: Analysis {ana['name']} uses configuration {configuration}") - add_common_args_ana = get_common_args_as_string(ana["name"], add_common_args) + add_common_args_ana = get_common_args_as_string(ana, add_common_args) if not add_common_args_ana: print(f"ERROR: Cannot parse common args for analysis {ana['name']}") continue diff --git a/MC/config/analysis_testing/json/analyses_config.json b/MC/config/analysis_testing/json/analyses_config.json index 63c67b285..6be135b8b 100644 --- a/MC/config/analysis_testing/json/analyses_config.json +++ b/MC/config/analysis_testing/json/analyses_config.json @@ -233,6 +233,9 @@ "expected_output": ["AnalysisResults.root"], "valid_mc": true, "valid_data": true, + "common_args": { + "shm-segment-size": 2500000000 + }, "tasks": ["o2-analysis-je-emc-eventselection-qa", "o2-analysis-je-emc-cellmonitor", "o2-analysis-je-emcal-correction-task", From 0bb1b31c03377dc1caebf22e5e5ba75923ad2f05 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 1 Mar 2024 12:29:52 +0100 Subject: [PATCH 087/101] [Anchor] Remove -k from QC run, return error code (#1513) Co-authored-by: Benedikt Volkel --- MC/run/ANCHOR/anchorMC.sh | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index 13512fea7..c1096e7ac 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -46,8 +46,8 @@ print_help() echo "ALIEN_JDL_SIMENGINE or SIMENGINE, choose the transport engine, default: TGeant4," echo "ALIEN_JDL_WORKFLOWDETECTORS, set detectors to be taken into account, default: ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP," echo "ALIEN_JDL_ANCHOR_SIM_OPTIONS, additional options that are passed to the workflow creation, default: -gen pythia8," - echo "ALIEN_JDL_ADDTIMESERIESINMC, run TPC time series. Default: 1, switch off by setting to 0," - echo "DISABLE_QC, set this to disable QC, e.g. to 1" + echo "ALIEN_JDL_ADDTIMESERIESINMC, run TPC time series. Switch off by setting to 0, default: 1," + echo "ALIEN_JDL_ANCHOR_SIM_DISABLE_QC|ANCHOR_SIM_DISABLE_QC, set this to disable QC, e.g. 
to 1, default: 0," } # Prevent the script from being soured to omit unexpected surprises when exit is used @@ -85,6 +85,7 @@ export ALIEN_JDL_SIMENGINE=${ALIEN_JDL_SIMENGINE:-${SIMENGINE:-TGeant4}} export ALIEN_JDL_WORKFLOWDETECTORS=${ALIEN_JDL_WORKFLOWDETECTORS:-ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP} # can be passed to contain additional options that will be passed to o2dpg_sim_workflow_anchored.py and eventually to o2dpg_sim_workflow.py export ALIEN_JDL_ANCHOR_SIM_OPTIONS=${ALIEN_JDL_ANCHOR_SIM_OPTIONS:--gen pythia8} +export ALIEN_JDL_ANCHOR_SIM_DISABLE_QC=${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC:-${ANCHOR_SIM_DISABLE_QC:-0}} # all others MUST be set by the user/on the outside export ALIEN_JDL_LPMANCHORPASSNAME=${ALIEN_JDL_LPMANCHORPASSNAME:-${ANCHORPASSNAME}} export ALIEN_JDL_MCANCHOR=${ALIEN_JDL_MCANCHOR:-${MCANCHOR}} @@ -237,7 +238,7 @@ export FAIRMQ_IPC_PREFIX=./ echo "Ready to start main workflow" -${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_O2DPGWORKFLOWTARGET:-aod} --cpu-limit ${ALIEN_JDL_CPULIMIT:-8} +${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_O2DPGWORKFLOWTARGET:-aod} --cpu-limit ${ALIEN_JDL_CPULIMIT} MCRC=$? # <--- we'll report back this code if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then @@ -246,14 +247,12 @@ if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt tpctimes fi -[[ ! -z "${DISABLE_QC}" ]] && echo "INFO: QC is disabled, skip it." +[[ "${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC}" != "0" ]] && echo "INFO: QC is disabled, skip it." -if [[ -z "${DISABLE_QC}" && "${MCRC}" = "0" && "${remainingargs}" == *"--include-local-qc"* ]] ; then +if [[ "${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC}" == "0" && "${MCRC}" = "0" && "${remainingargs}" == *"--include-local-qc"* ]] ; then # do QC tasks echo "Doing QC" - ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels QC --cpu-limit ${ALIEN_JDL_CPULIMIT:-8} -k - # NOTE that with the -k|--keep-going option, the runner will try to keep on executing even if some tasks fail. - # That means, even if there is a failing QC task, the return code will be 0 + ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels QC --cpu-limit ${ALIEN_JDL_CPULIMIT} MCRC=$? fi From 928d82f57b81889dbdf0b71e55921cb4d5cfc483 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 1 Mar 2024 13:06:07 +0100 Subject: [PATCH 088/101] Revert "[Anchor] Remove -k from QC run, return error code (#1513)" (#1515) This reverts commit 0bb1b31c03377dc1caebf22e5e5ba75923ad2f05. --- MC/run/ANCHOR/anchorMC.sh | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index c1096e7ac..13512fea7 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -46,8 +46,8 @@ print_help() echo "ALIEN_JDL_SIMENGINE or SIMENGINE, choose the transport engine, default: TGeant4," echo "ALIEN_JDL_WORKFLOWDETECTORS, set detectors to be taken into account, default: ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP," echo "ALIEN_JDL_ANCHOR_SIM_OPTIONS, additional options that are passed to the workflow creation, default: -gen pythia8," - echo "ALIEN_JDL_ADDTIMESERIESINMC, run TPC time series. Switch off by setting to 0, default: 1," - echo "ALIEN_JDL_ANCHOR_SIM_DISABLE_QC|ANCHOR_SIM_DISABLE_QC, set this to disable QC, e.g. to 1, default: 0," + echo "ALIEN_JDL_ADDTIMESERIESINMC, run TPC time series. 
Default: 1, switch off by setting to 0," + echo "DISABLE_QC, set this to disable QC, e.g. to 1" } # Prevent the script from being soured to omit unexpected surprises when exit is used @@ -85,7 +85,6 @@ export ALIEN_JDL_SIMENGINE=${ALIEN_JDL_SIMENGINE:-${SIMENGINE:-TGeant4}} export ALIEN_JDL_WORKFLOWDETECTORS=${ALIEN_JDL_WORKFLOWDETECTORS:-ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP} # can be passed to contain additional options that will be passed to o2dpg_sim_workflow_anchored.py and eventually to o2dpg_sim_workflow.py export ALIEN_JDL_ANCHOR_SIM_OPTIONS=${ALIEN_JDL_ANCHOR_SIM_OPTIONS:--gen pythia8} -export ALIEN_JDL_ANCHOR_SIM_DISABLE_QC=${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC:-${ANCHOR_SIM_DISABLE_QC:-0}} # all others MUST be set by the user/on the outside export ALIEN_JDL_LPMANCHORPASSNAME=${ALIEN_JDL_LPMANCHORPASSNAME:-${ANCHORPASSNAME}} export ALIEN_JDL_MCANCHOR=${ALIEN_JDL_MCANCHOR:-${MCANCHOR}} @@ -238,7 +237,7 @@ export FAIRMQ_IPC_PREFIX=./ echo "Ready to start main workflow" -${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_O2DPGWORKFLOWTARGET:-aod} --cpu-limit ${ALIEN_JDL_CPULIMIT} +${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_O2DPGWORKFLOWTARGET:-aod} --cpu-limit ${ALIEN_JDL_CPULIMIT:-8} MCRC=$? # <--- we'll report back this code if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then @@ -247,12 +246,14 @@ if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt tpctimes fi -[[ "${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC}" != "0" ]] && echo "INFO: QC is disabled, skip it." +[[ ! -z "${DISABLE_QC}" ]] && echo "INFO: QC is disabled, skip it." -if [[ "${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC}" == "0" && "${MCRC}" = "0" && "${remainingargs}" == *"--include-local-qc"* ]] ; then +if [[ -z "${DISABLE_QC}" && "${MCRC}" = "0" && "${remainingargs}" == *"--include-local-qc"* ]] ; then # do QC tasks echo "Doing QC" - ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels QC --cpu-limit ${ALIEN_JDL_CPULIMIT} + ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels QC --cpu-limit ${ALIEN_JDL_CPULIMIT:-8} -k + # NOTE that with the -k|--keep-going option, the runner will try to keep on executing even if some tasks fail. + # That means, even if there is a failing QC task, the return code will be 0 MCRC=$? 
fi From cf41e3246dcd6accdda8493135cd276b72479507 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 1 Mar 2024 18:15:09 +0100 Subject: [PATCH 089/101] [SimCI] Revise tests (#1517) * standalone AnalysisQC test has been remobved in favor of developments in workflow test * generators * do not blindly test all INI files when run_generator_tests.sh changes * find files correctly when there are other changed files not related to generator testing * workflows revised logic such that * if MC/bin changes --> run anchored, --> check correct creation of workflows implemented by PWGs, --> test AnalysisQC and QC * if MC/analysis_testing or MC-related QC configurations change --> test AnalysisQC and QC, --> test O2DPG AnalysisQC CLI * if anchored-related shell scripts change --> run anchored * relval * no changes Co-authored-by: Benedikt Volkel --- test/common/utils/utils.sh | 34 +++- test/run_analysisqc_tests.sh | 158 +----------------- test/run_generator_tests.sh | 48 +++--- test/run_workflow_tests.sh | 314 +++++++++++++++++------------------ 4 files changed, 207 insertions(+), 347 deletions(-) diff --git a/test/common/utils/utils.sh b/test/common/utils/utils.sh index 0c34c5395..03c01d96f 100644 --- a/test/common/utils/utils.sh +++ b/test/common/utils/utils.sh @@ -4,6 +4,33 @@ # Test utility functionality # +# a global counter for tests +TEST_COUNTER=0 + +# Prepare some colored output +SRED="\033[0;31m" +SGREEN="\033[0;32m" +SYELLOW="\033[0;33m" +SEND="\033[0m" + +echo_green() +{ + echo -e "${SGREEN}${*}${SEND}" +} + + +echo_red() +{ + echo -e "${SRED}${*}${SEND}" +} + + +echo_yellow() +{ + echo -e "${SYELLOW}${*}${SEND}" +} + + remove_artifacts() { [[ "${KEEP_ONLY_LOGS}" == "1" ]] && find . -type f ! -name '*.log' -and ! -name "*serverlog*" -and ! -name "*mergerlog*" -and ! -name "*workerlog*" -delete @@ -25,7 +52,12 @@ get_changed_files() [[ ! -z "$(git diff)" && -z ${ALIBUILD_HEAD_HASH+x} && -z ${O2DPG_TEST_HASH_HEAD+x} ]] && hash_head="" # if there are unstaged changes and no base from user, set to HEAD [[ ! 
-z "$(git diff)" && -z ${ALIBUILD_HEAD_HASH+x} && -z ${O2DPG_TEST_HASH_BASE+x} ]] && hash_base="HEAD" - git diff --diff-filter=AMR --name-only ${hash_base} ${hash_head} + local paths=$(git diff --diff-filter=AMR --name-only ${hash_base} ${hash_head}) + local absolute_paths= + for p in ${paths} ; do + absolute_paths+="$(realpath ${p}) " + done + echo "${absolute_paths}" } diff --git a/test/run_analysisqc_tests.sh b/test/run_analysisqc_tests.sh index bd57493fd..a4064ba30 100755 --- a/test/run_analysisqc_tests.sh +++ b/test/run_analysisqc_tests.sh @@ -1,160 +1,4 @@ #!/bin/bash -# The test parent dir to be cretaed in current directory -TEST_PARENT_DIR="o2dpg_tests/analysisqc" - -# unified names of log files -LOG_FILE="o2dpg-test-analysisqc.log" - -# Prepare some colored output -SRED="\033[0;31m" -SGREEN="\033[0;32m" -SEND="\033[0m" - - -echo_green() -{ - echo -e "${SGREEN}${*}${SEND}" -} - - -echo_red() -{ - echo -e "${SRED}${*}${SEND}" -} - - -get_git_repo_directory() -{ - local repo= - if [[ -d .git ]] ; then - pwd - else - repo=$(git rev-parse --git-dir 2> /dev/null) - fi - [[ "${repo}" != "" ]] && repo=${repo%%/.git} - echo ${repo} -} - - -test_analysisqc() -{ - echo "### Testing AnalysisQC creation for MC ###" > ${LOG_FILE} - ${O2DPG_ROOT}/MC/analysis_testing/o2dpg_analysis_test_workflow.py -f AO2D.root --is-mc -o wokflow_test_mc.json >> ${LOG_FILE} 2>&1 - local ret=${?} - [[ "${ret}" != "0" ]] && echo "[FATAL]: O2DPG_TEST failed" >> ${LOG_FILE} - echo "### Testing AnalysisQC creation for data ###" >> ${LOG_FILE} - ${O2DPG_ROOT}/MC/analysis_testing/o2dpg_analysis_test_workflow.py -f AO2D.root -o wokflow_test_data.json >> ${LOG_FILE} 2>&1 - local ret_data=${?} - [[ "${ret_data}" != "0" ]] && { echo "[FATAL]: O2DPG_TEST failed" >> ${LOG_FILE} ; ret=${ret_data} ; } - return ${ret} -} - - -print_usage() -{ - echo - echo "usage: run_workflow_tests.sh" - echo - echo " ENVIRONMENT VARIABLES:" - echo - echo " O2DPG_TEST_REPO_DIR : Point to the source repository you want to test." - echo " O2DPG_TEST_HASH_BASE : The base hash you want to use for comparison (optional)" - echo " O2DPG_TEST_HASH_HEAD : The head hash you want to use for comparison (optional)" - echo - echo " If O2DPG_TEST_HASH_BASE is not set, it will be looked for ALIBUILD_BASE_HASH." - echo " If also not set, this will be set to HEAD~1. However, if there are unstaged" - echo " changes, it will be set to HEAD." - echo - echo " If O2DPG_TEST_HASH_HEAD is not set, it will be looked for ALIBUILD_HEAD_HASH." - echo " If also not set, this will be set to HEAD. However, if there are unstaged" - echo " changes, it will left blank." - echo -} - -while [ "$1" != "" ] ; do - case $1 in - --help|-h ) print_usage - exit 1 - ;; - * ) echo "Unknown argument ${1}" - exit 1 - ;; - esac -done - -echo -echo "################################" -echo "# Run O2DPG AnalysisQC testing #" -echo "################################" -echo - -REPO_DIR=${O2DPG_TEST_REPO_DIR:-$(get_git_repo_directory)} -if [[ ! -d ${REPO_DIR}/.git ]] ; then - echo_red "Directory \"${REPO_DIR}\" is not a git repository." - exit 1 -fi - -if [[ -z ${O2DPG_ROOT+x} ]] ; then - echo_red "O2DPG is not loaded, probably other packages are missing as well in this environment." 
- exit 1 -fi - -# source the utilities -source ${REPO_DIR}/test/common/utils/utils.sh - -# Do the initial steps in the source dir where we have the full git repo -pushd ${REPO_DIR} > /dev/null - -# flag if anything changed for AnalysisQC -need_testing=$(get_changed_files | grep "MC/.*analysis_testing") - -# go back to where we came from -popd > /dev/null -REPO_DIR=$(realpath ${REPO_DIR}) - -# Now, do the trick: -# We just use the source dir since O2DPG's installation is basically just a copy of the whole repo. -# This makes sense in particular for local testing but also in the CI it works in the same way. We could do -# [[ -z {ALIBUILD_HEAD_HASH+x} ]] && export O2DPG_ROOT=${REPO_DIR} -# but let's do the same for both local and CI consistently -export O2DPG_ROOT=${REPO_DIR} - - -############### -# Let's do it # -############### -ret_global=0 -# prepare our local test directory for PWG tests -rm -rf ${TEST_PARENT_DIR} 2>/dev/null -mkdir -p ${TEST_PARENT_DIR} 2>/dev/null -pushd ${TEST_PARENT_DIR} > /dev/null - -# Test what we found -if [[ "${need_testing}" != "" ]] ; then - test_analysisqc - ret_global=${?} -else - echo "Nothing to test" - exit 0 -fi - -# return to where we came from -popd > /dev/null - -# However, if a central test fails, exit code will be !=0 -if [[ "${ret_global}" != "0" ]] ; then - echo - echo "########################" - echo "# ERROR for AnalysisQC #" - echo "########################" - echo - print_error_logs ${TEST_PARENT_DIR} - exit ${ret_global} -fi - -echo -echo_green "AnalysisQC tests successful" -echo - +# for now, obsolete exit 0 diff --git a/test/run_generator_tests.sh b/test/run_generator_tests.sh index d5a4d3c74..79addd3ea 100755 --- a/test/run_generator_tests.sh +++ b/test/run_generator_tests.sh @@ -250,35 +250,26 @@ add_ini_files_from_tests() done } -add_ini_files_from_all_tests() -{ - # Collect also those INI files for which the test has been changed - local all_tests=$(find ${REPO_DIR} -name "*.C" | grep "MC/.*/ini/tests") - local repo_dir_head=${REPO_DIR} - for t in ${all_tests} ; do - local this_test=$(realpath ${t}) - this_test=${this_test##${repo_dir_head}/} - local tc=$(basename ${this_test}) - this_test=${this_test%%/tests/*} - tc=${tc%.C}.ini - tc=${this_test}/${tc} - [[ "${INI_FILES}" == *"${tc}"* ]] && continue - INI_FILES+=" ${tc} " - done -} - collect_ini_files() { # Collect all INI files which have changed - local ini_files=$(get_changed_files | grep ".ini$" | grep "MC/config") - for ini in ${ini_files} ; do + local changed_files=$(get_changed_files) + for ini in ${changed_files} ; do + [[ "${ini}" != *"MC/config"*".ini" ]] && continue [[ "${INI_FILES}" == *"${ini}"* ]] && continue || INI_FILES+=" ${ini} " done # this relies on INI_FILES and MACRO_FILES_POTENTIALLY_INCLUDED # collect all INI files that might include some changed macros - add_ini_files_from_macros $(get_changed_files | grep ".C$" | grep "MC/config") + changed_files=$(get_changed_files) + local macros= + for m in ${changed_files} ; do + [[ "${m}" != *"MC/config"*".C" ]] && continue + macros+=" ${m} " + done + + add_ini_files_from_macros ${macros} # this relies on MACRO_FILES_POTENTIALLY_INCLUDED # collect all INI files that might contain macros which in turn include changed macros @@ -286,7 +277,13 @@ collect_ini_files() add_ini_files_from_macros $(find_including_macros) # also tests might have changed in which case we run them - add_ini_files_from_tests $(get_changed_files | grep ".C$" | grep "MC/.*/ini/tests") + changed_files=$(get_changed_files) + local macros= + for m in 
${changed_files} ; do + [[ "${m}" != *"MC/"*"ini/tests"*".C" ]] && continue + macros+=" ${m} " + done + add_ini_files_from_tests ${macros} } @@ -361,12 +358,12 @@ echo REPO_DIR=${O2DPG_TEST_REPO_DIR:-$(get_git_repo_directory)} if [[ ! -d ${REPO_DIR}/.git ]] ; then - echo_red "Directory \"${REPO_DIR}\" is not a git repository." + echo "ERROR: Directory \"${REPO_DIR}\" is not a git repository." exit 1 fi if [[ -z ${O2DPG_ROOT+x} ]] ; then - echo_red "O2DPG is not loaded, probably other packages are missing as well in this environment." + echo "ERROR: O2DPG is not loaded, probably other packages are missing as well in this environment." exit 1 fi @@ -376,11 +373,6 @@ source ${REPO_DIR}/test/common/utils/utils.sh # Do the initial steps in the source dir where we have the full git repo pushd ${REPO_DIR} > /dev/null -# First check, if testing itself has changed. In that case this will add INI files -# for which a test can be found -global_testing_changed=$(get_changed_files | grep -E "common/kine_tests/test_generic_kine.C|run_generator_tests.sh" | grep "^test/") -[[ "${global_testing_changed}" != "" ]] && add_ini_files_from_all_tests - # Then add the ini files that have changed as well. We need to do that so we get information # about missing tests etc. collect_ini_files diff --git a/test/run_workflow_tests.sh b/test/run_workflow_tests.sh index 2acde102e..9962b3293 100755 --- a/test/run_workflow_tests.sh +++ b/test/run_workflow_tests.sh @@ -2,56 +2,30 @@ # The test parent dir to be cretaed in current directory TEST_PARENT_DIR_PWG="o2dpg_tests/workflows_pwgs" -TEST_PARENT_DIR_BIN="o2dpg_tests/workflows_bin" +TEST_PARENT_DIR_BIN="o2dpg_tests/workflows_analysisqc" TEST_PARENT_DIR_ANCHORED="o2dpg_tests/anchored" -# a global counter for tests -TEST_COUNTER=0 - # unified names of log files LOG_FILE_WF="o2dpg-test-wf.log" LOG_FILE_ANCHORED="o2dpg-test-anchored.log" - -# Prepare some colored output -SRED="\033[0;31m" -SGREEN="\033[0;32m" -SEND="\033[0m" - - -echo_green() -{ - echo -e "${SGREEN}${*}${SEND}" -} +LOG_FILE_ANALYSISQC="o2dpg-test_analysisqc.log" -echo_red() -{ - echo -e "${SRED}${*}${SEND}" -} - get_git_repo_directory() { + local look_dir=${1:-$(pwd)} + look_dir=$(realpath "${look_dir}") + look_dir=${look_dir%%/.git} local repo= - if [[ -d .git ]] ; then - pwd - else + ( + cd "${look_dir}" repo=$(git rev-parse --git-dir 2> /dev/null) - fi - [[ "${repo}" != "" ]] && repo=${repo%%/.git} + [[ "${repo}" != "" ]] && { repo=$(realpath "${repo}") ; repo=${repo%%/.git} ; } + ) echo ${repo} } -get_all_workflows() -{ - # Collect also those INI files for which the test has been changed - local repo_dir_head=${REPO_DIR} - local grep_dir=${1} - local all_workflows=$(find ${repo_dir_head} -name "*.sh" | grep "${grep_dir}") - echo ${all_workflows} -} - - test_single_wf() { local wf_script=${1} @@ -59,12 +33,12 @@ test_single_wf() make_wf_creation_script ${wf_script} ${wf_script_local} local has_wf_script_local=${?} echo -n "Test ${TEST_COUNTER}: ${wfs}" - [[ "${has_wf_script_local}" != "0" ]] && { echo "No WF creation in script ${wfs} ##########" ; return 1 ; } + [[ "${has_wf_script_local}" != "0" ]] && { echo -n " (No WF creation in script)" ; echo_red " -> FAILED" ; return 1 ; } # Check if there is an "exit" other than the usual # [ ! "${O2DPG_ROOT}" ] && echo "Error: This needs O2DPG loaded" && exit 1 # like ones. 
# This is not perfect but might prevent us from running into some checks the WF script does before launching the WF creation - [[ "$(grep exit ${wf_script_local} | grep -v "This needs")" != "" ]] && { echo -e -n "\nFound \"exit\" in ${wfs} so will not test automatically" ; return 0 ; } + [[ "$(grep exit ${wf_script_local} | grep -v "This needs")" != "" ]] && { echo -n " (Found \"exit\" in script, not testing automatically)" ; echo_yellow " -> WARNING" ; return 0 ; } # one single test echo "Test ${wf_line} from ${wfs}" > ${LOG_FILE_WF} bash ${wf_script_local} >> ${LOG_FILE_WF} 2>&1 @@ -72,6 +46,7 @@ test_single_wf() local ret_this_qc=0 local ret_this_analysis=0 if [[ "${ret_this}" != "0" ]] ; then + echo_red " -> FAILED" echo "[FATAL]: O2DPG_TEST Workflow creation failed" >> ${LOG_FILE_WF} elif [[ "${execute}" != "" ]] ; then local memlimit=${O2DPG_TEST_WORKFLOW_MEMLIMIT:+--mem-limit ${O2DPG_TEST_WORKFLOW_MEMLIMIT}} @@ -80,7 +55,9 @@ test_single_wf() [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels QC ${memlimit} >> ${LOG_FILE_WF} 2>&1 ; ret_this_qc=${?} ; } [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels Analysis ${memlimit} >> ${LOG_FILE_WF} 2>&1 ; ret_this_analysis=${?} ; } ret_this=$((ret_this + ret_this_qc + ret_this_analysis)) - [[ "${ret_this}" != "0" ]] && echo "[FATAL]: O2DPG_TEST Workflow execution failed" >> ${LOG_FILE_WF} + [[ "${ret_this}" != "0" ]] && echo "[FATAL]: O2DPG_TEST Workflow execution failed" >> ${LOG_FILE_WF} || echo_green " -> PASSED" + else + echo_green " -> PASSED" fi return ${ret_this} } @@ -91,8 +68,8 @@ run_workflow_creation() local execute= while [ "$1" != "" ] ; do case $1 in - --execute ) shift - execute=1 + --execute ) execute=1 + shift ;; * ) wf_scripts+="${1} " shift @@ -116,16 +93,34 @@ run_workflow_creation() local ret_this=${?} [[ "${ret_this}" != "0" ]] && RET=${ret_this} popd > /dev/null - if [[ "${ret_this}" != "0" ]] ; then - echo_red " -> FAILED" - else - echo_green " -> PASSED" - fi done return ${RET} } + +test_analysisqc_cli() +{ + ((TEST_COUNTER++)) + local test_dir="${TEST_COUNTER}_analysisqc_cli" + rm -rf ${test_dir} 2> /dev/null + mkdir ${test_dir} + pushd ${test_dir} > /dev/null + echo "### Testing AnalysisQC creation for MC ###" > ${LOG_FILE_ANALYSISQC} + echo -n "Test ${TEST_COUNTER}: Running AnalysisQC CLI" + ${O2DPG_ROOT}/MC/analysis_testing/o2dpg_analysis_test_workflow.py -f AO2D.root --is-mc -o wokflow_test_mc.json >> ${LOG_FILE_ANALYSISQC} 2>&1 + local ret=${?} + [[ "${ret}" != "0" ]] && echo "[FATAL]: O2DPG_TEST failed" >> ${LOG_FILE_ANALYSISQC} + echo "### Testing AnalysisQC creation for data ###" >> ${LOG_FILE_ANALYSISQC} + ${O2DPG_ROOT}/MC/analysis_testing/o2dpg_analysis_test_workflow.py -f AO2D.root -o wokflow_test_data.json >> ${LOG_FILE_ANALYSISQC} 2>&1 + local ret_data=${?} + [[ "${ret_data}" != "0" ]] && { echo "[FATAL]: O2DPG_TEST failed" >> ${LOG_FILE_ANALYSISQC} ; ret=${ret_data} ; } + popd > /dev/null + [[ "${ret}" != "0" ]] && echo_red " -> FAILED" || echo_green " -> PASSED" + return ${ret} +} + + test_anchored() { local to_run="${1:-${O2DPG_ROOT}/MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh}" @@ -140,27 +135,24 @@ test_anchored() echo -n "Test ${TEST_COUNTER}: ${anchored_script}" ${anchored_script} >> ${LOG_FILE_ANCHORED} 2>&1 local ret_this=${?} - [[ "${ret_this}" != "0" ]] && RET=${ret_this} + if [[ "${ret_this}" != "0" ]] ; then + echo_red " -> FAILED" + 
RET=${ret_this} + else + echo_green " -> PASSED" + fi popd > /dev/null done return ${RET} } -collect_changed_pwg_wf_files() -{ - # Collect all INI files which have changed - local wf_scripts=$(get_changed_files | grep ".sh$" | grep "MC/run") - for wfs in ${wf_scripts} ; do - [[ "${WF_FILES}" == *"${wfs}"* ]] && continue || WF_FILES+=" ${wfs} " - done -} - print_usage() { + echo echo "usage: run_workflow_tests.sh" echo - echo " ENVIRONMENT VARIABLES:" + echo " ENVIRONMENT VARIABLES TO DETERMINE WHAT TO COMPARE:" echo echo " O2DPG_TEST_REPO_DIR : Point to the source repository you want to test." echo " O2DPG_TEST_HASH_BASE : The base hash you want to use for comparison (optional)" @@ -174,10 +166,15 @@ print_usage() echo " If also not set, this will be set to HEAD. However, if there are unstaged" echo " changes, it will left blank." echo + echo " SPECIFIC ENVIRONMENT VARIABLES FOR THIS TEST:" echo " O2DPG_TEST_WORKFLOW_MEMLIMIT : The memory limit that is passed to the workflow runner in case a workflow is executed (optional)" echo } + +############# +# Main part # +############# while [ "$1" != "" ] ; do case $1 in --help|-h ) print_usage @@ -189,47 +186,70 @@ while [ "$1" != "" ] ; do esac done -echo -echo "##############################" -echo "# Run O2DPG workflow testing #" -echo "##############################" -echo - +# determine the repository directory REPO_DIR=${O2DPG_TEST_REPO_DIR:-$(get_git_repo_directory)} if [[ ! -d ${REPO_DIR}/.git ]] ; then - echo_red "Directory \"${REPO_DIR}\" is not a git repository." + echo "ERROR: Directory \"${REPO_DIR}\" is not a git repository." exit 1 fi if [[ -z ${O2DPG_ROOT+x} ]] ; then - echo_red "O2DPG is not loaded, probably other packages are missing as well in this environment." + echo "ERROR: O2DPG is not loaded, probably other packages are missing as well in this environment." exit 1 fi # source the utilities source ${REPO_DIR}/test/common/utils/utils.sh + +echo "##############################" +echo "# Run O2DPG workflow testing #" +echo "##############################" + # Do the initial steps in the source dir where we have the full git repo pushd ${REPO_DIR} > /dev/null # flag if anything changed in the sim workflow bin dir -changed_wf_bin=$(get_changed_files | grep -E "MC/bin") -changed_wf_bin_related=$(get_changed_files | grep -E "MC/analysis_testing|MC/config/analysis_testing/json|MC/config/QC/json") -changed_anchored_related=$(get_changed_files | grep -E "MC/run/ANCHOR/anchorMC.sh|MC/run/ANCHOR/tests|MC/bin|UTILS/parse-async-WorkflowConfig.py") - +changed_sim_bin=$(get_changed_files | grep -E "MC/bin") +# collect if anything has changed related to AnalysisQC +changed_analysis_qc=$(get_changed_files | grep -E "MC/analysis_testing|MC/config/analysis_testing/json|MC/config/QC/json") +# check if anything has changed concerning anchoring +changed_anchored=$(get_changed_files | grep -E "MC/bin|MC/run/ANCHOR/anchorMC.sh|MC/run/ANCHOR/tests|MC/bin|UTILS/parse-async-WorkflowConfig.py|DATA/production/configurations/asyncReco/setenv_extra.sh|DATA/production/configurations/asyncReco/async_pass.sh|DATA/common/setenv.sh|DATA/production/workflow-multiplicities.sh") +# collect changed workflow scripts +changed_workflows= +# workflows to be executed +execute_workflows= +echo "==> Test outline" +if [[ "${changed_sim_bin}" != "" ]] ; then + # in this case, something central has changed, test creation of all workflows against it + echo " - The creation of simulation workflows from all run scripts (MC/run/**/*.sh) will be tested." 
+ for p in $(find MC/run -name "*.sh") ; do + changed_workflows+="$(realpath ${p}) " + done + # definitely run anchored if central python scripts have changed + echo " - Changes in MC/bin/ detected, mark anchored MC test to be run." + changed_anchored="1" +else + # otherwise, only take the changed shell scripts + changed_workflows= + changed_files=$(get_changed_files) + for cf in ${changed_files} ; do + [[ "${cf}" != *"MC/run"*".sh" ]] && continue + changed_workflows+="${cf} " + done + [[ "${changed_workflows}" != "" ]] && echo " - The creation of simulation workflows from changed run scripts (sub-sect of MC/run/**/*.sh) will be tested." +fi -# collect what has changed for PWGs -collect_changed_pwg_wf_files +if [[ "${changed_analysis_qc}" != "" || "${changed_sim_bin}" ]] ; then + for p in $(find "MC/bin/tests" -name "*.sh") ; do + execute_workflows+="$(realpath ${p}) " + done + echo " - Test AnalysisQC CLI and execution with a simulation." +fi -# get realpaths for all changes -wf_files_tmp=${WF_FILES} -WF_FILES= -for wf_tmp in ${wf_files_tmp} ; do - # convert to full path so that we can find it from anywhere - WF_FILES+="$(realpath ${wf_tmp}) " -done +[[ "${changed_anchored}" != "" ]] && echo " - Test anchored simulation." -# go back to where we came from +# everything collected, go back to where we came from popd > /dev/null REPO_DIR=$(realpath ${REPO_DIR}) @@ -241,111 +261,83 @@ REPO_DIR=$(realpath ${REPO_DIR}) export O2DPG_ROOT=${REPO_DIR} -############### -# ANCHORED MC # -############### -# prepare our local test directory for PWG tests -rm -rf ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null -mkdir -p ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null -pushd ${TEST_PARENT_DIR_ANCHORED} > /dev/null - -# global return code for PWGs -ret_global_anchored=0 -if [[ "${changed_anchored_related}" != "" ]] ; then - echo "### Test anchored ###" - # Run an anchored test - test_anchored - ret_global_anchored=${?} - echo -fi - -# return to where we came from -popd > /dev/null - -######## -# PWGs # -######## -# prepare our local test directory for PWG tests -rm -rf ${TEST_PARENT_DIR_PWG} 2>/dev/null -mkdir -p ${TEST_PARENT_DIR_PWG} 2>/dev/null -pushd ${TEST_PARENT_DIR_PWG} > /dev/null - +############################## +# PWG workflow shell scripts # +############################## # global return code for PWGs ret_global_pwg=0 -if [[ "${changed_wf_bin}" != "" ]] ; then - # Run all the PWG related WF creations, hence overwrite what was collected by collect_changed_pwg_wf_files earlier - WF_FILES=$(get_all_workflows "MC/run/.*/") - echo -fi # Test what we found -if [[ "${WF_FILES}" != "" ]] ; then - echo "### Test PWG-related workflow creation ###" +if [[ "${changed_workflows}" != "" ]] ; then + # prepare our local test directory for PWG tests + rm -rf ${TEST_PARENT_DIR_PWG} 2>/dev/null + mkdir -p ${TEST_PARENT_DIR_PWG} 2>/dev/null + pushd ${TEST_PARENT_DIR_PWG} > /dev/null + echo - run_workflow_creation ${WF_FILES} + echo "==> START BLOCK: Test PWG-related workflow creation <==" + run_workflow_creation ${changed_workflows} ret_global_pwg=${?} - echo + [[ "${ret_global_pwg}" != "0" ]] && { echo "WARNING for workflows creations, some could not be built." 
; print_error_logs ./ ; } + echo "==> END BLOCK: Test PWG-related workflow creation <==" + + # return to where we came from + popd > /dev/null fi -# return to where we came from -popd > /dev/null -#################### -# sim workflow bin # -#################### +#################################### +# sim workflow bin with AnalysisQC # +#################################### # prepare our local test directory for bin tests -rm -rf ${TEST_PARENT_DIR_BIN} 2>/dev/null -mkdir -p ${TEST_PARENT_DIR_BIN} 2>/dev/null -pushd ${TEST_PARENT_DIR_BIN} > /dev/null - # global return code for PWGs -ret_global_bin=0 -if [[ "${changed_wf_bin}" != "" || "${changed_wf_bin_related}" != "" ]] ; then - echo "### Test bin-related workflow creation ###" +ret_analysis_qc=0 +if [[ "${changed_analysis_qc}" != "" ]] ; then + rm -rf ${TEST_PARENT_DIR_BIN} 2>/dev/null + mkdir -p ${TEST_PARENT_DIR_BIN} 2>/dev/null + pushd ${TEST_PARENT_DIR_BIN} > /dev/null + echo + echo "==> START BLOCK: Test running workflow with AnalysisQC <==" + # test command line interface + test_analysisqc_cli + ret_analysis_qc=${?} # Run all the bin test WF creations - run_workflow_creation $(get_all_workflows "MC/bin/tests") --execute - ret_global_bin=${?} - echo + [[ "${ret_analysis_qc}" == "0" ]] && { run_workflow_creation ${execute_workflows} --execute ; ret_analysis_qc=${?} ; } + [[ "${ret_analysis_qc}" != "0" ]] && { echo "ERROR for workflows execution and AnalysisQC." ; print_error_logs ./ ; } + echo "==> END BLOCK: Test running workflow with AnalysisQC <==" + + # return to where we came from + popd > /dev/null fi -# return to where we came from -popd > /dev/null -# final printing of log files of failed tests -# For PWG workflows, this triggers only a warning at the moment -if [[ "${ret_global_pwg}" != "0" ]] ; then - echo - echo "#####################################" - echo "# WARNING for PWG-related workflows #" - echo "#####################################" - echo - print_error_logs ${TEST_PARENT_DIR_PWG} -fi +############### +# ANCHORED MC # +############### +# global return code for PWGs +ret_global_anchored=0 +if [[ "${changed_anchored}" != "" ]] ; then + # prepare our local test directory for PWG tests + rm -rf ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null + mkdir -p ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null + pushd ${TEST_PARENT_DIR_ANCHORED} > /dev/null -# However, if a central test fails, exit code will be !=0 -if [[ "${ret_global_bin}" != "0" ]] ; then - echo - echo "###################################" - echo "# ERROR for bin-related workflows #" - echo "###################################" echo - print_error_logs ${TEST_PARENT_DIR_BIN} -fi + echo "==> START BLOCK: Test anchored simulation" + # Run an anchored test + test_anchored + ret_global_anchored=${?} + [[ "${ret_global_anchored}" != "0" ]] && { echo "ERROR executing anchored simulation." ; print_error_logs ./ ; } + echo "==> END BLOCK: Test anchored simulation" -# However, if a central test fails, exit code will be !=0 -if [[ "${ret_global_anchored}" != "0" ]] ; then - echo - echo "##########################" - echo "# ERROR for anchored MCs #" - echo "##########################" - echo - print_error_logs ${TEST_PARENT_DIR_ANCHORED} + # return to where we came from + popd > /dev/null fi -RET=$(( ret_global_bin + ret_global_anchored )) +RET=$(( ret_analysis_qc + ret_global_anchored )) echo -[[ "${RET}" != "0" ]] && echo "There were errors, please check!" || echo_green "All required workflow tests successful" +[[ "${RET}" != "0" ]] && echo_red "There were errors, please check!" 
|| echo_green "All required workflow tests successful" exit ${RET} From 77e6a0613fae0c61f33d65d00b316f9bd2f2d54d Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Fri, 1 Mar 2024 14:36:01 +0100 Subject: [PATCH 090/101] Possibility to take QC alone when we split the wf --- .../configurations/asyncReco/async_pass.sh | 46 +++++++++++++++++-- 1 file changed, 42 insertions(+), 4 deletions(-) diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index bd6e9ce89..8f1e9e653 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -575,11 +575,17 @@ else fi if ([[ -z "$ALIEN_JDL_SSPLITSTEP" ]] && [[ -z "$ALIEN_JDL_SSPLITSTEP" ]]) || [[ "$ALIEN_JDL_SSPLITSTEP" -eq 3 ]] || ( [[ -n $ALIEN_JDL_STARTSPLITSTEP ]] && [[ "$ALIEN_JDL_STARTSPLITSTEP" -le 3 ]]) || [[ "$ALIEN_JDL_SSPLITSTEP" -eq "all" ]]; then - # 3. matching, QC, calib, AOD + # 3. matching, calib, AOD, potentially QC WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS_START + if [[ "$ALIEN_JDL_KEEPQCSEPARATE" == "1" ]]; then + echo "QC will be run as last step, removing it from 3rd step" + for i in QC; do + export WORKFLOW_PARAMETERS=$(echo $WORKFLOW_PARAMETERS | sed -e "s/,$i,/,/g" -e "s/^$i,//" -e "s/,$i"'$'"//" -e "s/^$i"'$'"//") + done + fi echo "WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS" - echo "Step 3) matching, QC, calib, AOD" - echo -e "\nStep 3) matching, QC, calib, AOD" >> workflowconfig.log + echo "Step 3) matching, calib, AOD, potentially QC" + echo -e "\nStep 3) matching, calib, AOD, potentially QC" >> workflowconfig.log export TIMEFRAME_RATE_LIMIT=0 echo "Removing detectors $DETECTORS_EXCLUDE" READER_DELAY=${ALIEN_JDL_READERDELAY:-30} @@ -607,13 +613,45 @@ else fi fi fi + if [[ "$ALIEN_JDL_KEEPQCSEPARATE" == "1" ]]; then + if ([[ -z "$ALIEN_JDL_SSPLITSTEP" ]] && [[ -z "$ALIEN_JDL_SSPLITSTEP" ]]) || [[ "$ALIEN_JDL_SSPLITSTEP" -eq 4 ]] || ( [[ -n $ALIEN_JDL_STARTSPLITSTEP ]] && [[ "$ALIEN_JDL_STARTSPLITSTEP" -le 4 ]]) || [[ "$ALIEN_JDL_SSPLITSTEP" -eq "all" ]]; then + # 4. QC + WORKFLOW_PARAMETERS="QC" + echo "WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS" + echo "Step 4) QC" + echo -e "\nStep 4) QC" >> workflowconfig.log + export TIMEFRAME_RATE_LIMIT=0 + echo "Removing detectors $DETECTORS_EXCLUDE" + env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=print TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=ALL WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=ALL WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS=HMP WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list >> workflowconfig.log + # run it + if [[ "0$RUN_WORKFLOW" != "00" ]]; then + timeStart=`date +%s` + time env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=run TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=ALL WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=ALL WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS=HMP WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list + exitcode=$? 
+ timeEnd=`date +%s` + timeUsed=$(( $timeUsed+$timeEnd-$timeStart )) + delta=$(( $timeEnd-$timeStart )) + echo "Time spent in running the workflow, Step 4 = $delta s" + echo "exitcode = $exitcode" + if [[ $exitcode -ne 0 ]]; then + echo "exit code from Step 4 of processing is " $exitcode > validation_error.message + echo "exit code from Step 4 of processing is " $exitcode + exit $exitcode + fi + mv latest.log latest_reco_4.log + if [[ -f performanceMetrics.json ]]; then + mv performanceMetrics.json performanceMetrics_4.json + fi + fi + fi + fi fi # now extract all performance metrics if [[ $ALIEN_JDL_EXTRACTMETRICS == "1" ]]; then IFS=$'\n' timeStart=`date +%s` - for perfMetricsFiles in performanceMetrics.json performanceMetrics_1.json performanceMetrics_2.json performanceMetrics_3.json ; do + for perfMetricsFiles in performanceMetrics.json performanceMetrics_1.json performanceMetrics_2.json performanceMetrics_3.json performanceMetrics_4.json ; do suffix=`echo $perfMetricsFiles | sed 's/performanceMetrics\(.*\).json/\1/'` if [[ -f "performanceMetrics.json" ]]; then for workflow in `grep ': {' $perfMetricsFiles`; do From c95a0484615cc6970332498d1d437f40ca84c173 Mon Sep 17 00:00:00 2001 From: Jeremy Wilkinson Date: Sat, 2 Mar 2024 10:11:30 +0100 Subject: [PATCH 091/101] Fix converter logic for o2-analysis-v0converter in test workflow (#1493) * fix converter logic for o2v0converter in test workflow * add ft0-corrected-table --- MC/analysis_testing/o2dpg_analysis_test_workflow.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/MC/analysis_testing/o2dpg_analysis_test_workflow.py b/MC/analysis_testing/o2dpg_analysis_test_workflow.py index c50ed6999..de50128ed 100755 --- a/MC/analysis_testing/o2dpg_analysis_test_workflow.py +++ b/MC/analysis_testing/o2dpg_analysis_test_workflow.py @@ -190,8 +190,9 @@ def get_additional_workflows(input_aod): o2_analysis_converters = {"O2collision_001": "o2-analysis-collision-converter --doNotSwap", "O2zdc_001": "o2-analysis-zdc-converter", "O2bc_001": "o2-analysis-bc-converter", - "O2v0_001": "o2-analysis-v0converter", - "O2trackextra_001": "o2-analysis-tracks-extra-converter"} + "O2v0_002": "o2-analysis-v0converter", + "O2trackextra_001": "o2-analysis-tracks-extra-converter", + "O2ft0corrected": "o2-analysis-ft0-corrected-table"} for i in froot.GetListOfKeys(): if "DF_" not in i.GetName(): continue From 3d0840e5094d8ed12cc73b586aa91a6b05970a1e Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Mon, 4 Mar 2024 12:13:49 +0100 Subject: [PATCH 092/101] [SimWF] Use __global_init_task__ more consistently (#1518) * centralise function that creates the task * apply also when using AnalysisQC CLI Co-authored-by: Benedikt Volkel --- .../o2dpg_analysis_test_workflow.py | 8 +++-- MC/bin/o2dpg_sim_workflow.py | 18 ++-------- MC/bin/o2dpg_workflow_utils.py | 33 +++++++++++++++++-- 3 files changed, 39 insertions(+), 20 deletions(-) diff --git a/MC/analysis_testing/o2dpg_analysis_test_workflow.py b/MC/analysis_testing/o2dpg_analysis_test_workflow.py index de50128ed..2bcd2038a 100755 --- a/MC/analysis_testing/o2dpg_analysis_test_workflow.py +++ b/MC/analysis_testing/o2dpg_analysis_test_workflow.py @@ -80,7 +80,7 @@ o2dpg_workflow_utils = importlib.util.module_from_spec(spec) sys.modules[module_name] = o2dpg_workflow_utils spec.loader.exec_module(o2dpg_workflow_utils) -from o2dpg_workflow_utils import createTask, dump_workflow +from o2dpg_workflow_utils import createTask, dump_workflow, createGlobalInitTask module_name = "o2dpg_analysis_test_utils" spec = 
importlib.util.spec_from_file_location(module_name, join(O2DPG_ROOT, "MC", "analysis_testing", "o2dpg_analysis_test_utils.py")) @@ -322,7 +322,9 @@ def run(args): print("ERROR: QC upload was requested, however in that case a --pass-name and --period-name are required") return 1 - workflow = [] + ### setup global environment variables which are valid for all tasks, set as first task + global_env = {"ALICEO2_CCDB_CONDITION_NOT_AFTER": args.condition_not_after} if args.condition_not_after else None + workflow = [createGlobalInitTask(global_env)] add_analysis_tasks(workflow, args.input_file, expanduser(args.analysis_dir), is_mc=args.is_mc, analyses_only=args.only_analyses, autoset_converters=args.autoset_converters, include_disabled_analyses=args.include_disabled, timeout=args.timeout, collision_system=args.collision_system, add_common_args=args.add_common_args) if args.with_qc_upload: add_analysis_qc_upload_tasks(workflow, args.period_name, args.run_number, args.pass_name) @@ -350,6 +352,8 @@ def main(): parser.add_argument("--timeout", type=int, default=None, help="Timeout for analysis tasks in seconds.") parser.add_argument("--collision-system", dest="collision_system", help="Set the collision system. If not set, tried to be derived from ALIEN_JDL_LPMInterationType. Fallback to pp") parser.add_argument("--add-common-args", dest="add_common_args", nargs="*", help="Pass additional common arguments per analysis, for instance --add-common-args EMCAL-shm-segment-size 2500000000 will add --shm-segment-size 2500000000 to the EMCAL analysis") + parser.add_argument('--condition-not-after', dest="condition_not_after", type=int, help="only consider CCDB objects not created after this timestamp (for TimeMachine)", default=3385078236000) + parser.set_defaults(func=run) args = parser.parse_args() return(args.func(args)) diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index d0812d42c..31d2ad06a 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -20,7 +20,7 @@ import sys import importlib.util import argparse -from os import environ, mkdir, getcwd +from os import environ, mkdir from os.path import join, dirname, isdir, isabs import random import json @@ -326,20 +326,8 @@ def extractVertexArgs(configKeyValuesStr, finalDiamondDict): workflow['stages'] = [] ### setup global environment variables which are valid for all tasks -globalenv = {} -if args.condition_not_after: - # this is for the time-machine CCDB mechanism - globalenv['ALICEO2_CCDB_CONDITION_NOT_AFTER'] = args.condition_not_after - # this is enforcing the use of local CCDB caching - if environ.get('ALICEO2_CCDB_LOCALCACHE') == None: - print ("ALICEO2_CCDB_LOCALCACHE not set; setting to default " + getcwd() + '/ccdb') - globalenv['ALICEO2_CCDB_LOCALCACHE'] = getcwd() + "/ccdb" - else: - # fixes the workflow to use and remember externally provided path - globalenv['ALICEO2_CCDB_LOCALCACHE'] = environ.get('ALICEO2_CCDB_LOCALCACHE') - globalenv['IGNORE_VALIDITYCHECK_OF_CCDB_LOCALCACHE'] = '${ALICEO2_CCDB_LOCALCACHE:+"ON"}' - -globalinittask = createGlobalInitTask(globalenv) +global_env = {'ALICEO2_CCDB_CONDITION_NOT_AFTER': args.condition_not_after} if args.condition_not_after else None +globalinittask = createGlobalInitTask(global_env) globalinittask['cmd'] = 'o2-ccdb-cleansemaphores -p ${ALICEO2_CCDB_LOCALCACHE}' workflow['stages'].append(globalinittask) #### diff --git a/MC/bin/o2dpg_workflow_utils.py b/MC/bin/o2dpg_workflow_utils.py index 748129de2..18fd600c9 100755 --- 
a/MC/bin/o2dpg_workflow_utils.py +++ b/MC/bin/o2dpg_workflow_utils.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 +from os import environ, getcwd from copy import deepcopy import json @@ -84,18 +85,44 @@ def createTask(name='', needs=[], tf=-1, cwd='./', lab=[], cpu=1, relative_cpu=N 'cwd' : cwd } -def createGlobalInitTask(envdict): +def createGlobalInitTask(keys_values=None, set_defaults=True): """Returns a special task that is recognized by the executor as a task whose environment section is to be globally applied to all tasks of a workflow. - envdict: dictionary of environment variables and values to be globally applied to all tasks + Args: + keys_values: dict or None + dictionary of environment variables and values to be globally applied to all tasks + if sharing keys with defaults, keys_values takes precedence + set_defaults: bool + whether or not some default values will be added + + Returns: + dict: task dictionary """ + + # dictionary holding global environment to be passed to task + env_dict = {} + + if set_defaults: + if environ.get('ALICEO2_CCDB_LOCALCACHE') is None: + print ("ALICEO2_CCDB_LOCALCACHE not set; setting to default " + getcwd() + '/ccdb') + env_dict['ALICEO2_CCDB_LOCALCACHE'] = getcwd() + "/ccdb" + else: + # fixes the workflow to use and remember externally provided path + env_dict['ALICEO2_CCDB_LOCALCACHE'] = environ.get('ALICEO2_CCDB_LOCALCACHE') + env_dict['IGNORE_VALIDITYCHECK_OF_CCDB_LOCALCACHE'] = '${ALICEO2_CCDB_LOCALCACHE:+"ON"}' + + if keys_values: + # keys_values takes priority in case of same keys + env_dict |= keys_values + t = createTask(name = '__global_init_task__') t['cmd'] = 'NO-COMMAND' - t['env'] = envdict + t['env'] = env_dict return t + def summary_workflow(workflow): print("=== WORKFLOW SUMMARY ===\n") print(f"-> There are {len(workflow)} tasks") From 826526fdecf78c66359eba4f3fa3ef4c1cdc6973 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Mon, 4 Mar 2024 15:29:35 +0100 Subject: [PATCH 093/101] Run only analyses, no QCDB upload (#1519) Achieved by changing `-tt Analysis_` to `--target-labels Analysis`. 
Upload tasks to not have that label, instead they can be triggered with `--target-labels AnalysisUpload` --- MC/run/examples/O2DPG_pp_minbias.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MC/run/examples/O2DPG_pp_minbias.sh b/MC/run/examples/O2DPG_pp_minbias.sh index be23c9d80..f9b2a99c7 100755 --- a/MC/run/examples/O2DPG_pp_minbias.sh +++ b/MC/run/examples/O2DPG_pp_minbias.sh @@ -58,7 +58,7 @@ fi RETANA=0 if [ "${DOANALYSIS}" != "" ] && [ "${RETMC}" = "0" ]; then # run test analyses if requested - ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt "Analysis_" ${MEMLIMIT} ${CPULIMIT} + ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels Analysis ${MEMLIMIT} ${CPULIMIT} RETANA=${?} fi From 52c6168d3a6162e4a4e5eb9b932e2897446f0060 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Wed, 6 Mar 2024 12:52:31 +0100 Subject: [PATCH 094/101] Possibility to set a rate limiting from JDL for split WF --- DATA/production/configurations/asyncReco/async_pass.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index 8f1e9e653..17d502b01 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -586,7 +586,7 @@ else echo "WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS" echo "Step 3) matching, calib, AOD, potentially QC" echo -e "\nStep 3) matching, calib, AOD, potentially QC" >> workflowconfig.log - export TIMEFRAME_RATE_LIMIT=0 + export TIMEFRAME_RATE_LIMIT=${ALIEN_JDL_TIMEFRAMERATELIMITSSPLITWF:-0} echo "Removing detectors $DETECTORS_EXCLUDE" READER_DELAY=${ALIEN_JDL_READERDELAY:-30} export ARGS_EXTRA_PROCESS_o2_global_track_cluster_reader+=" --reader-delay $READER_DELAY " @@ -620,7 +620,7 @@ else echo "WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS" echo "Step 4) QC" echo -e "\nStep 4) QC" >> workflowconfig.log - export TIMEFRAME_RATE_LIMIT=0 + export TIMEFRAME_RATE_LIMIT=${ALIEN_JDL_TIMEFRAMERATELIMITSSPLITWF:-0} echo "Removing detectors $DETECTORS_EXCLUDE" env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=print TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=ALL WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=ALL WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS=HMP WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list >> workflowconfig.log # run it From 97008600b2735504012173a1983adc76ceaa81ed Mon Sep 17 00:00:00 2001 From: Sandro Wenzel Date: Wed, 6 Mar 2024 14:10:57 +0100 Subject: [PATCH 095/101] stability fixes in runGRIDContainerized.sh --- GRID/utils/runGRIDContainerized.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/GRID/utils/runGRIDContainerized.sh b/GRID/utils/runGRIDContainerized.sh index 1752f692d..cb57a7a1c 100755 --- a/GRID/utils/runGRIDContainerized.sh +++ b/GRID/utils/runGRIDContainerized.sh @@ -9,7 +9,7 @@ echo "Trying to run script ${SCRIPT} in a container environment" # detect architecture (ARM or X86) ARCH=$(uname -i) -if [ "$ARCH" == "aarch64" ] || [ "$arch" == "x86" ]; then +if [ "$ARCH" == "aarch64" ] || [ "$ARCH" == "x86_64" ]; then echo "Detected hardware architecture : $ARCH" else echo "Invalid architecture ${ARCH} detected. 
Exiting" @@ -35,9 +35,9 @@ fi # copy script to WORK_DIR cp ${SCRIPT} ${WORK_DIR}/job.sh -# export certificates (need to be created before) -ALIEN_CERTFILE=$(ls -t /tmp/tokencert_*.pem 2> /dev/null | head -n 1) -ALIEN_KEYFILE=$(ls -t /tmp/tokenkey_*.pem 2> /dev/null | head -n 1) +# export certificates - belonging to current user (need to be created before) +ALIEN_CERTFILE=$(find /tmp -type f -name 'tokencert*pem' -user `whoami` 2> /dev/null) +ALIEN_KEYFILE=$(find /tmp -type f -name 'tokenkey*pem' -user `whoami` 2> /dev/null) [ "${ALIEN_CERTFILE}" == "" ] && echo "No certificate file found; Initialize a token with alien-init-token or similar" && exit 1 [ "${ALIEN_KEYFILE}" == "" ] && echo "No certificate file found; Initialize a token with alien-init-token or similar" && exit 1 From 73fe052ce6d44c5d73eb6f035ff9ac318a805426 Mon Sep 17 00:00:00 2001 From: Timo Wilken Date: Wed, 6 Mar 2024 14:22:54 +0100 Subject: [PATCH 096/101] Fix typo in CODEOWNERS (#1525) @aphecetche's username was misspelt. Also added @aphecetche with write permissions -- otherwise code ownership doesn't work. --- CODEOWNERS | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index f697dcaf8..27ea257cd 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -8,9 +8,9 @@ /DATA/testing/detectors/FV0 /DATA/testing/detectors/HMP /DATA/testing/detectors/ITS -/DATA/testing/detectors/MCH @aphecethce +/DATA/testing/detectors/MCH @aphecetche /DATA/testing/detectors/MFT -/DATA/testing/detectors/MID @aphecethce +/DATA/testing/detectors/MID @aphecetche /DATA/testing/detectors/PHS /DATA/testing/detectors/TOF @noferini @chiarazampolli /DATA/testing/detectors/TPC @wiechula From 10b933ce4b9924246723ce8ac04434584f4cfc34 Mon Sep 17 00:00:00 2001 From: Nasir Mehdi Malik <89008506+nasirmehdimalik@users.noreply.github.com> Date: Thu, 7 Mar 2024 19:27:58 +0530 Subject: [PATCH 097/101] TRD: add tracking QC task for mc workflow (#1467) * TRD;) tracking task in mc workflow * rmove digit query from tracklet task * removed not maxNumberCycles, applicable items * trd-digit-task name change to trd-standalone-task * Update MC/config/QC/json/trd-tracking-task.json Co-authored-by: Ole Schmidt * Update MC/config/QC/json/trd-tracking-task.json * readerCommand configured with TPC_TRD --------- Co-authored-by: Ole Schmidt --- MC/bin/o2dpg_qc_finalization_workflow.py | 3 +- MC/bin/o2dpg_sim_workflow.py | 7 ++- ...its-task.json => trd-standalone-task.json} | 23 ++++++---- MC/config/QC/json/trd-tracking-task.json | 44 +++++++++++++++++++ 4 files changed, 67 insertions(+), 10 deletions(-) rename MC/config/QC/json/{trd-digits-task.json => trd-standalone-task.json} (66%) create mode 100644 MC/config/QC/json/trd-tracking-task.json diff --git a/MC/bin/o2dpg_qc_finalization_workflow.py b/MC/bin/o2dpg_qc_finalization_workflow.py index 373989f20..6908b4956 100755 --- a/MC/bin/o2dpg_qc_finalization_workflow.py +++ b/MC/bin/o2dpg_qc_finalization_workflow.py @@ -83,7 +83,8 @@ def add_QC_postprocessing(taskName, qcConfigPath, needs, runSpecific, prodSpecif add_QC_finalization('emcBCQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/emc-reco-tasks.json') #add_QC_finalization('tpcTrackingQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/tpc-qc-tracking-direct.json') add_QC_finalization('tpcStandardQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/tpc-qc-standard-direct.json') - add_QC_finalization('trdDigitsQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/trd-digits-task.json') + add_QC_finalization('trdDigitsQC', 
'json://${O2DPG_ROOT}/MC/config/QC/json/trd-standalone-task.json') + add_QC_finalization('trdTrackingQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/trd-tracking-task.json') add_QC_finalization('vertexQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/vertexing-qc-direct-mc.json') add_QC_finalization('ITSTPCmatchQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json') add_QC_finalization('TOFMatchQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json') diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index 31d2ad06a..3cf34a924 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -1196,7 +1196,12 @@ def addQCPerTF(taskName, needs, readerCommand, configFilePath, objectsFile=''): addQCPerTF(taskName='trdDigitsQC', needs=[TRDDigitask['name']], readerCommand='o2-trd-trap-sim', - configFilePath='json://${O2DPG_ROOT}/MC/config/QC/json/trd-digits-task.json') + configFilePath='json://${O2DPG_ROOT}/MC/config/QC/json/trd-standalone-task.json') + + addQCPerTF(taskName='trdTrackingQC', + needs=[TRDTRACKINGtask2['name']], + readerCommand='o2-global-track-cluster-reader --track-types "ITS-TPC-TRD,TPC-TRD" --cluster-types none', + configFilePath='json://${O2DPG_ROOT}/MC/config/QC/json/trd-tracking-task.json') ### TOF addQCPerTF(taskName='tofDigitsQC', diff --git a/MC/config/QC/json/trd-digits-task.json b/MC/config/QC/json/trd-standalone-task.json similarity index 66% rename from MC/config/QC/json/trd-digits-task.json rename to MC/config/QC/json/trd-standalone-task.json index ae05bdfd3..8ad3f7523 100644 --- a/MC/config/QC/json/trd-digits-task.json +++ b/MC/config/QC/json/trd-standalone-task.json @@ -3,11 +3,8 @@ "config": { "database": { "implementation": "CCDB", - "host": "ccdb-test.cern.ch:8080", - "username": "not_applicable", - "password": "not_applicable", - "name": "not_applicable" - }, + "host": "ccdb-test.cern.ch:8080" + }, "Activity": { "number": "42", "type": "2", @@ -31,8 +28,7 @@ "className": "o2::quality_control_modules::trd::DigitsTask", "moduleName": "QcTRD", "detectorName": "TRD", - "cycleDurationSeconds": "60", - "maxNumberCycles": "-1", + "cycleDurationSeconds": "60", "dataSource": { "type": "direct", "query": "digits:TRD/DIGITS;tracklets:TRD/TRACKLETS;triggers:TRD/TRKTRGRD;noiseMap:TRD/NOISEMAP/0?lifetime=condition&ccdb-path=TRD/Calib/NoiseMapMCM;chamberStatus:TRD/CHSTATUS/0?lifetime=condition&ccdb-path=TRD/Calib/HalfChamberStatusQC;fedChamberStatus:TRD/FCHSTATUS/0?lifetime=condition&ccdb-path=TRD/Calib/DCSDPsFedChamberStatus" @@ -43,7 +39,18 @@ "pulseheightpeaklower": "1.0", "pulseheightpeakupper": "5.0" } - } + }, + "Tracklets": { + "active": "true", + "className": "o2::quality_control_modules::trd::TrackletsTask", + "moduleName": "QcTRD", + "detectorName": "TRD", + "cycleDurationSeconds": "60", + "dataSource": { + "type": "direct", + "query": "tracklets:TRD/TRACKLETS;triggers:TRD/TRKTRGRD;noiseMap:TRD/NOISEMAP/0?lifetime=condition&ccdb-path=TRD/Calib/NoiseMapMCM;chamberStatus:TRD/CHSTATUS/0?lifetime=condition&ccdb-path=TRD/Calib/HalfChamberStatusQC;fedChamberStatus:TRD/FCHSTATUS/0?lifetime=condition&ccdb-path=TRD/Calib/DCSDPsFedChamberStatus" + } + } }, "dataSamplingPolicies": [] } diff --git a/MC/config/QC/json/trd-tracking-task.json b/MC/config/QC/json/trd-tracking-task.json new file mode 100644 index 000000000..f8093814f --- /dev/null +++ b/MC/config/QC/json/trd-tracking-task.json @@ -0,0 +1,44 @@ +{ + "qc": { + "config": { + "database": { + "implementation": "CCDB", + "host": 
"ccdb-test.cern.ch:8080" + }, + "Activity": { + "number": "42", + "type": "2", + "provenance": "qc_mc", + "passName": "passMC", + "periodName": "SimChallenge" + }, + "monitoring": { + "url": "no-op://" + }, + "consul": { + "url": "" + }, + "conditionDB": { + "url": "alice-ccdb.cern.ch" + } + }, + "tasks": { + "Tracking": { + "active": "true", + "className": "o2::quality_control_modules::trd::TrackingTask", + "moduleName": "QcTRD", + "detectorName": "TRD", + "cycleDurationSeconds": "60", + "dataSource": { + "type": "direct", + "query": "trackITSTPCTRD:TRD/MATCH_ITSTPC;trigITSTPCTRD:TRD/TRGREC_ITSTPC;trackTPCTRD:TRD/MATCH_TPC;trigTPCTRD:TRD/TRGREC_TPC" + }, + "taskParameters": { + "detailedQC": "false", + "trackSources": "ITS-TPC-TRD,TPC-TRD" + } + } + }, + "dataSamplingPolicies": [] + } +} From 9943c5b23d125c2947ff892498570fb09060469a Mon Sep 17 00:00:00 2001 From: Diana <70915994+diana0x0f@users.noreply.github.com> Date: Fri, 8 Mar 2024 10:44:36 +0100 Subject: [PATCH 098/101] MFT: new MC track QC task (#1468) --- MC/bin/o2dpg_qc_finalization_workflow.py | 1 + MC/bin/o2dpg_sim_workflow.py | 4 ++ MC/config/QC/json/mft-tracks-mc.json | 49 ++++++++++++++++++++++++ 3 files changed, 54 insertions(+) create mode 100644 MC/config/QC/json/mft-tracks-mc.json diff --git a/MC/bin/o2dpg_qc_finalization_workflow.py b/MC/bin/o2dpg_qc_finalization_workflow.py index 6908b4956..593f7a280 100755 --- a/MC/bin/o2dpg_qc_finalization_workflow.py +++ b/MC/bin/o2dpg_qc_finalization_workflow.py @@ -79,6 +79,7 @@ def add_QC_postprocessing(taskName, qcConfigPath, needs, runSpecific, prodSpecif add_QC_finalization('mftDigitsQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/mft-digits-0.json', MFTDigitsQCneeds) add_QC_finalization('mftClustersQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/mft-clusters.json') add_QC_finalization('mftTracksQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/mft-tracks.json') + add_QC_finalization('mftMCTracksQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/mft-tracks-mc.json') add_QC_finalization('emcRecoQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/emc-reco-tasks.json') add_QC_finalization('emcBCQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/emc-reco-tasks.json') #add_QC_finalization('tpcTrackingQC', 'json://${O2DPG_ROOT}/MC/config/QC/json/tpc-qc-tracking-direct.json') diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index 3cf34a924..b97c94da3 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -1180,6 +1180,10 @@ def addQCPerTF(taskName, needs, readerCommand, configFilePath, objectsFile=''): needs=[MFTRECOtask['name']], readerCommand='o2-global-track-cluster-reader --track-types MFT --cluster-types MFT', configFilePath='json://${O2DPG_ROOT}/MC/config/QC/json/mft-tracks.json') + addQCPerTF(taskName='mftMCTracksQC', + needs=[MFTRECOtask['name']], + readerCommand='o2-global-track-cluster-reader --track-types MFT --cluster-types MFT', + configFilePath='json://${O2DPG_ROOT}/MC/config/QC/json/mft-tracks-mc.json') ### TPC # addQCPerTF(taskName='tpcTrackingQC', diff --git a/MC/config/QC/json/mft-tracks-mc.json b/MC/config/QC/json/mft-tracks-mc.json new file mode 100644 index 000000000..1dcd7774d --- /dev/null +++ b/MC/config/QC/json/mft-tracks-mc.json @@ -0,0 +1,49 @@ +{ + "qc" : { + "config" : { + "database" : { + "implementation" : "CCDB", + "host" : "ccdb-test.cern.ch:8080", + "username" : "not_applicable", + "password" : "not_applicable", + "name" : "not_applicable" + }, + "Activity" : { + "number" : "42", + "type" : "2", + "provenance": "qc_mc", + 
"passName": "passMC", + "periodName": "SimChallenge" + }, + "monitoring" : { + "url" : "no-op://" + }, + "consul" : { + "url" : "" + }, + "conditionDB" : { + "url" : "alice-ccdb.cern.ch" + } + }, + "tasks" : { + "TracksMC" : { + "active" : "true", + "className" : "o2::quality_control_modules::mft::QcMFTTrackMCTask", + "moduleName" : "QcMFT", + "detectorName" : "MFT", + "cycleDurationSeconds" : "30", + "maxNumberCycles" : "-1", + "dataSource_comment" : "The other type of dataSource is \"direct\", see basic-no-sampling.json.", + "dataSource" : { + "type" : "direct", + "query" : "tracks:MFT/TRACKS/0;mctruth:MFT/TRACKSMCTR/0" + }, + "location" : "remote", + "taskParameters" : { + "collisionsContextPath": "./collisioncontext.root" + } + } + } + } + } + \ No newline at end of file From e059331566317ae9679d50d5ceb6c6d1a37daa18 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Fri, 8 Mar 2024 15:41:36 +0100 Subject: [PATCH 099/101] Adjusting the logic for the rate limiting in split wf --- .../configurations/asyncReco/async_pass.sh | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index 17d502b01..e5f2e3284 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -586,10 +586,15 @@ else echo "WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS" echo "Step 3) matching, calib, AOD, potentially QC" echo -e "\nStep 3) matching, calib, AOD, potentially QC" >> workflowconfig.log - export TIMEFRAME_RATE_LIMIT=${ALIEN_JDL_TIMEFRAMERATELIMITSSPLITWF:-0} + # This uses the same time frame rate limiting as in full wf, unless differently specified in the JDL + export TIMEFRAME_RATE_LIMIT=${ALIEN_JDL_TIMEFRAMERATELIMITSSPLITWF:-${TIMEFRAME_RATE_LIMIT}} echo "Removing detectors $DETECTORS_EXCLUDE" - READER_DELAY=${ALIEN_JDL_READERDELAY:-30} - export ARGS_EXTRA_PROCESS_o2_global_track_cluster_reader+=" --reader-delay $READER_DELAY " + if [[ $ALIEN_JDL_USEREADERDELAY == 1 ]]; then + # if we add a delay, the rate limiting should be disabled + TIMEFRAME_RATE_LIMIT=0 + READER_DELAY=${ALIEN_JDL_READERDELAY:-30} + export ARGS_EXTRA_PROCESS_o2_global_track_cluster_reader+=" --reader-delay $READER_DELAY " + fi echo "extra args are $ARGS_EXTRA_PROCESS_o2_global_track_cluster_reader" env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=print TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=ALL WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=ALL WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS=HMP WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list >> workflowconfig.log # run it @@ -620,8 +625,8 @@ else echo "WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS" echo "Step 4) QC" echo -e "\nStep 4) QC" >> workflowconfig.log - export TIMEFRAME_RATE_LIMIT=${ALIEN_JDL_TIMEFRAMERATELIMITSSPLITWF:-0} echo "Removing detectors $DETECTORS_EXCLUDE" + echo "The rate limiting will be the same as in step 3: TIMEFRAME_RATE_LIMIT = ${TIMEFRAME_RATE_LIMIT}" env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=print TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=ALL WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=ALL WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS=HMP 
WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list >> workflowconfig.log # run it if [[ "0$RUN_WORKFLOW" != "00" ]]; then From 1cde53e752a8e9af7a9c8337e1ff497c8a154e74 Mon Sep 17 00:00:00 2001 From: motomioya <95481703+motomioya@users.noreply.github.com> Date: Sat, 9 Mar 2024 00:57:04 +0900 Subject: [PATCH 100/101] Add files to simulate bbtomuons without forcing semileptonic decay (#1504) --- .../generator/GeneratorBeautyToMu_EvtGen.C | 5 +- .../GeneratorHF_bbbarToDDbarToMuons_fwdy.ini | 23 +++++ .../GeneratorHF_bbbarToDDbarToMuons_fwdy.C | 85 +++++++++++++++++++ .../PWGDQ/runBeautyToMuons_noForce_fwd_pp.sh | 27 ++++++ 4 files changed, 138 insertions(+), 2 deletions(-) create mode 100644 MC/config/PWGDQ/ini/GeneratorHF_bbbarToDDbarToMuons_fwdy.ini create mode 100644 MC/config/PWGDQ/ini/tests/GeneratorHF_bbbarToDDbarToMuons_fwdy.C create mode 100644 MC/run/PWGDQ/runBeautyToMuons_noForce_fwd_pp.sh diff --git a/MC/config/PWGDQ/external/generator/GeneratorBeautyToMu_EvtGen.C b/MC/config/PWGDQ/external/generator/GeneratorBeautyToMu_EvtGen.C index 98b71a00b..3edfb93dc 100644 --- a/MC/config/PWGDQ/external/generator/GeneratorBeautyToMu_EvtGen.C +++ b/MC/config/PWGDQ/external/generator/GeneratorBeautyToMu_EvtGen.C @@ -11,7 +11,7 @@ R__ADD_INCLUDE_PATH($O2DPG_ROOT/MC/config/PWGHF/external/generator) FairGenerator* -GeneratorBeautyToMu_EvtGenFwdY(double rapidityMin = -4.3, double rapidityMax = -2.2, bool ispp = true, bool verbose = false, TString pdgs = "511;521;531;541;5112;5122;5232;5132;5332") +GeneratorBeautyToMu_EvtGenFwdY(double rapidityMin = -4.3, double rapidityMax = -2.2, bool ispp = true, bool forcedecay = true, bool verbose = false, TString pdgs = "511;521;531;541;5112;5122;5232;5132;5332") { auto gen = new o2::eventgen::GeneratorEvtGen(); gen->setRapidity(rapidityMin,rapidityMax); @@ -30,7 +30,8 @@ GeneratorBeautyToMu_EvtGenFwdY(double rapidityMin = -4.3, double rapidityMax = - gen->AddPdg(std::stoi(spdg),i); printf("PDG %d \n",std::stoi(spdg)); } - gen->SetForceDecay(kEvtSemiMuonic); + if(forcedecay) gen->SetForceDecay(kEvtSemiMuonic); + else gen->SetForceDecay(kEvtAll); // set random seed gen->readString("Random:setSeed on"); uint random_seed; diff --git a/MC/config/PWGDQ/ini/GeneratorHF_bbbarToDDbarToMuons_fwdy.ini b/MC/config/PWGDQ/ini/GeneratorHF_bbbarToDDbarToMuons_fwdy.ini new file mode 100644 index 000000000..18a7faf43 --- /dev/null +++ b/MC/config/PWGDQ/ini/GeneratorHF_bbbarToDDbarToMuons_fwdy.ini @@ -0,0 +1,23 @@ + +### The setup uses an external event generator +### This part sets the path of the file and the function call to retrieve it + +[GeneratorExternal] +fileName = ${O2DPG_ROOT}/MC/config/PWGDQ/external/generator/GeneratorBeautyToMu_EvtGen.C +funcName = GeneratorBeautyToMu_EvtGenFwdY(-4.3,-2.3,true,false) + +### The external generator derives from GeneratorPythia8. 
+### This part configures the bits of the interface: configuration and user hooks + +[GeneratorPythia8] +config = ${O2DPG_ROOT}/MC/config/common/pythia8/generator/pythia8_hf.cfg +hooksFileName = ${O2DPG_ROOT}/MC/config/PWGHF/pythia8/hooks/pythia8_userhooks_qqbar.C +hooksFuncName = pythia8_userhooks_bbbar(-4.3,-2.3) + +### The setup uses an external even generator trigger which is +### defined in the following file and it is retrieved and configured +### according to the specified function call + +[TriggerExternal] +fileName = ${O2DPG_ROOT}/MC/config/PWGDQ/trigger/selectDaughterFromHFwithinAcc.C +funcName = selectDaughterFromHFwithinAcc(13,kTRUE,-4.3,-2.3) diff --git a/MC/config/PWGDQ/ini/tests/GeneratorHF_bbbarToDDbarToMuons_fwdy.C b/MC/config/PWGDQ/ini/tests/GeneratorHF_bbbarToDDbarToMuons_fwdy.C new file mode 100644 index 000000000..271e0ff39 --- /dev/null +++ b/MC/config/PWGDQ/ini/tests/GeneratorHF_bbbarToDDbarToMuons_fwdy.C @@ -0,0 +1,85 @@ +int External() +{ + int checkPdgDecay = 13; + std::string path{"o2sim_Kine.root"}; + TFile file(path.c_str(), "READ"); + if (file.IsZombie()) { + std::cerr << "Cannot open ROOT file " << path << "\n"; + return 1; + } + auto tree = (TTree*)file.Get("o2sim"); + std::vector* tracks{}; + tree->SetBranchAddress("MCTrack", &tracks); + + int nLeptons{}; + int nLeptonsInAcceptance{}; + int nLeptonsToBeDone{}; + int nSignalPairs{}; + int nLeptonPairs{}; + int nLeptonPairsInAcceptance{}; + int nLeptonPairsToBeDone{}; + auto nEvents = tree->GetEntries(); + + for (int i = 0; i < nEvents; i++) { + tree->GetEntry(i); + int nleptonseinacc = 0; + int nleptonse = 0; + int nleptonseToBeDone = 0; + int nopenHeavy = 0; + for (auto& track : *tracks) { + auto pdg = track.GetPdgCode(); + auto y = track.GetRapidity(); + if (std::abs(pdg) == checkPdgDecay) { + int igmother = track.getMotherTrackId(); + if (igmother > 0) { + auto gmTrack = (*tracks)[igmother]; + int gmpdg = gmTrack.GetPdgCode(); + if ( int(std::abs(gmpdg)/100.) == 4 || int(std::abs(gmpdg)/1000.) == 4 || int(std::abs(gmpdg)/100.) == 5 || int(std::abs(gmpdg)/1000.) 
== 5 ) { + nLeptons++; + nleptonse++; + if (-4.3 < y && y < -2.3) { + nleptonseinacc++; + nLeptonsInAcceptance++; + } + if (track.getToBeDone()) { + nLeptonsToBeDone++; + nleptonseToBeDone++; + } + } + } + } else if (std::abs(pdg) == 411 || std::abs(pdg) == 421 || std::abs(pdg) == 431 || std::abs(pdg) == 4122 || std::abs(pdg) == 4132 || std::abs(pdg) == 4232 || std::abs(pdg) == 4332 || std::abs(pdg) == 511 || std::abs(pdg) == 521 || std::abs(pdg) == 531 || std::abs(pdg) == 541 || std::abs(pdg) == 5112 || std::abs(pdg) == 5122 || std::abs(pdg) == 5232 || std::abs(pdg) == 5132 || std::abs(pdg) == 5332) { + nopenHeavy++; + } + } + if (nopenHeavy > 1) nSignalPairs++; + if (nleptonse > 1) nLeptonPairs++; + if (nleptonseToBeDone > 1) nLeptonPairsToBeDone++; + if (nleptonseinacc > 1) nLeptonPairsInAcceptance++; + } + std::cout << "#events: " << nEvents << "\n" + << "#muons in acceptance: " << nLeptonsInAcceptance << "\n" + << "#muon pairs in acceptance: " << nLeptonPairsInAcceptance << "\n" + << "#muons: " << nLeptons << "\n" + << "#muons to be done: " << nLeptonsToBeDone << "\n" + << "#signal pairs: " << nSignalPairs << "\n" + << "#muon pairs: " << nLeptonPairs << "\n" + << "#muon pairs to be done: " << nLeptonPairsToBeDone << "\n"; + if (nSignalPairs <= nLeptonPairs) { + std::cerr << "Number of muon pairs should be less than HF hadron pairs\n"; + return 1; + } + if (nSignalPairs < nEvents) { + std::cerr << "Number of signal pairs should be at least equaled to the number of events\n"; + return 1; + } + if (nLeptonPairs != nLeptonPairsToBeDone) { + std::cerr << "The number of muon pairs should be the same as the number of muon pairs which should be transported.\n"; + return 1; + } + if (nLeptons != nLeptonsToBeDone) { + std::cerr << "The number of muons should be the same as the number of muons which should be transported.\n"; + return 1; + } + + return 0; +} diff --git a/MC/run/PWGDQ/runBeautyToMuons_noForce_fwd_pp.sh b/MC/run/PWGDQ/runBeautyToMuons_noForce_fwd_pp.sh new file mode 100644 index 000000000..ded2c646b --- /dev/null +++ b/MC/run/PWGDQ/runBeautyToMuons_noForce_fwd_pp.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +# make sure O2DPG + O2 is loaded +[ ! "${O2DPG_ROOT}" ] && echo "Error: This needs O2DPG loaded" && exit 1 +[ ! "${O2_ROOT}" ] && echo "Error: This needs O2 loaded" && exit 1 + +# ----------- SETUP LOCAL CCDB CACHE -------------------------- +export ALICEO2_CCDB_LOCALCACHE=$PWD/.ccdb + + +# ----------- LOAD UTILITY FUNCTIONS -------------------------- +. 
${O2_ROOT}/share/scripts/jobutils.sh + +RNDSEED=${RNDSEED:-0} +NSIGEVENTS=${NSIGEVENTS:-1} +NBKGEVENTS=${NBKGEVENTS:-1} +NWORKERS=${NWORKERS:-8} +NTIMEFRAMES=${NTIMEFRAMES:-1} + + +${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py -eCM 13600 -gen external -j ${NWORKERS} -ns ${NSIGEVENTS} -tf ${NTIMEFRAMES} -e TGeant4 -mod "MCH MFT MID ITS" \ + -trigger "external" -ini $O2DPG_ROOT/MC/config/PWGDQ/ini/GeneratorHF_bbbarToDDbarToMuons_fwdy.ini \ + -genBkg pythia8 -procBkg cdiff -colBkg pp --embedding -nb ${NBKGEVENTS} \ + -confKeyBkg "Diamond.width[2]=6" -interactionRate 2000 --mft-assessment-full --fwdmatching-assessment-full + +# run workflow +${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -f workflow.json -tt aod -jmax 1 From 93ba6ee13cded1fd1c20ace47b33e549653d21b2 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 12 Mar 2024 15:16:04 +0100 Subject: [PATCH 101/101] Add first batch of PWG experts as code owners (#1529) --- CODEOWNERS | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CODEOWNERS b/CODEOWNERS index 27ea257cd..3a3568f07 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -18,4 +18,10 @@ /DATA/testing/detectors/ZDC /MC @sawenzel @chiarazampolli @gconesab @benedikt-voelkel + +# PWG experts +/MC/*/PWGHF @AliceO2Group/reviewers-pwg-hf @sawenzel @chiarazampolli @benedikt-voelkel +/MC/*/PWGLF @AliceO2Group/reviewers-pwg-lf @sawenzel @chiarazampolli @benedikt-voelkel +/MC/*/PWGEM @AliceO2Group/reviewers-pwg-em @sawenzel @chiarazampolli @benedikt-voelkel + /RelVal @sawenzel @chiarazampolli @gconesab @benedikt-voelkel
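
A note on the WORKFLOW_PARAMETERS handling introduced for the split workflow (PATCH 090/101): when QC is deferred to its own step, one token has to be removed from a comma-separated list, and the sed chain used there covers all four positions a token can occupy (middle, leading, trailing, sole entry). Below is a minimal standalone sketch of that idiom; the helper name remove_from_list and the example values are illustrative assumptions and are not part of any patch above.

remove_from_list() {
  # drop one token from a comma-separated list, e.g. "CALIB,QC,AOD" -> "CALIB,AOD"
  local list=${1}
  local token=${2}
  echo "${list}" | sed -e "s/,${token},/,/g" \
                       -e "s/^${token},//" \
                       -e "s/,${token}\$//" \
                       -e "s/^${token}\$//"
}

# illustrative usage, mirroring the QC-deferral case in async_pass.sh
WORKFLOW_PARAMETERS="CALIB,QC,AOD"
WORKFLOW_PARAMETERS=$(remove_from_list "${WORKFLOW_PARAMETERS}" "QC")
echo "${WORKFLOW_PARAMETERS}"   # prints CALIB,AOD

The four separate expressions matter here: a bare "s/${token}//" would leave a dangling comma and could also clip longer tokens that merely contain the same substring, whereas matching the surrounding commas (or line anchors) keeps the remaining list well-formed.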