Merge branch 'AliceO2Group:master' into tpc-cleanup
rmunzer authored Apr 3, 2024
2 parents d8ef510 + adb8e49 commit 358955b
Showing 11 changed files with 156 additions and 72 deletions.
57 changes: 33 additions & 24 deletions DATA/common/setenv_calib.sh
@@ -30,6 +30,7 @@ if has_detector_calib FT0 && has_detector_reco FT0; then CAN_DO_CALIB_FT0_TIMEOF
if has_detector_calib FV0 && has_processing_step FV0_RECO; then CAN_DO_CALIB_FV0_INTEGRATEDCURR=1; else CAN_DO_CALIB_FV0_INTEGRATEDCURR=0; fi
if has_detector_calib FDD && has_processing_step FDD_RECO; then CAN_DO_CALIB_FDD_INTEGRATEDCURR=1; else CAN_DO_CALIB_FDD_INTEGRATEDCURR=0; fi
if has_detector_calib ZDC && has_processing_step ZDC_RECO; then CAN_DO_CALIB_ZDC_TDC=1; else CAN_DO_CALIB_ZDC_TDC=0; fi
if [[ $SYNCMODE == 1 ]] && has_processing_step ENTROPY_ENCODER && [[ ! -z "$WORKFLOW_DETECTORS_CTF" ]] && [[ $WORKFLOW_DETECTORS_CTF != "NONE" ]]; then CAN_DO_CALIB_RCT_UPDATER=1; else CAN_DO_CALIB_RCT_UPDATER=0; fi
# for async recalibration
if has_detector_calib EMC && has_detector_reco EMC && [[ $SYNCMODE != 1 ]]; then CAN_DO_CALIB_EMC_ASYNC_RECALIB=1; else CAN_DO_CALIB_EMC_ASYNC_RECALIB=0; fi
if [[ $SYNCMODE != 1 ]] && has_detector_reco TPC; then CAN_DO_CALIB_ASYNC_EXTRACTTPCCURRENTS=1; else CAN_DO_CALIB_ASYNC_EXTRACTTPCCURRENTS=0; fi
@@ -42,7 +43,7 @@ if [[ ${DISABLE_TRD_PH:-} == 1 ]]; then CAN_DO_CALIB_TRD_T0=0; fi
: ${CALIB_TPC_SCDCALIB_SLOTLENGTH:=600} # the slot length needs to be known both on the aggregator and the processing nodes, therefore it is defined (in seconds!) here
: ${CALIB_TPC_SCDCALIB_SENDTRKDATA:=1} # by default, we want to write the track information in addition to unbinned residuals to allow finer filtering offline

if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then
if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then # Calibrations enabled in non-COSMIC runs

# here we do not handle calibrations that are only meant for async (e.g. EMC_ASYNC_RECALIB); those must always be enabled explicitly

@@ -88,26 +89,6 @@ if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then
if [[ $CAN_DO_CALIB_TPC_VDRIFTTGL == 1 ]]; then
if [[ -z ${CALIB_TPC_VDRIFTTGL+x} ]]; then CALIB_TPC_VDRIFTTGL=1; fi
fi
# IDCs (by default we enable it for running the synch. reco on the EPNs, but not on staging since we have only 1 calibration node available)
if [[ $CAN_DO_CALIB_TPC_IDC == 1 ]]; then
if [[ -z ${CALIB_TPC_IDC+x} ]]; then
if [[ $EPNSYNCMODE == 1 ]] && [[ "${GEN_TOPO_DEPLOYMENT_TYPE:-}" != "ALICE_STAGING" ]]; then
CALIB_TPC_IDC=1;
else
CALIB_TPC_IDC=0;
fi
fi
fi
# SAC (by default we enable it for running the synch. reco on the EPNs)
if [[ $CAN_DO_CALIB_TPC_SAC == 1 ]]; then
if [[ -z ${CALIB_TPC_SAC+x} ]]; then
if [[ $EPNSYNCMODE == 1 ]]; then
CALIB_TPC_SAC=1;
else
CALIB_TPC_SAC=0;
fi
fi
fi

# calibrations for TRD
if [[ $CAN_DO_CALIB_TRD_VDRIFTEXB == 1 ]] ; then
@@ -170,6 +151,34 @@ if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then
fi
fi

# Calibrations irrespective of COSMIC or non-COSMIC run:

# when possible, run RCT updater
if [[ $CAN_DO_CALIB_RCT_UPDATER == 1 ]]; then
if [[ -z ${CALIB_RCT_UPDATER+x} ]]; then CALIB_RCT_UPDATER=1; fi
fi

# IDCs (by default we enable it for running the synch. reco on the EPNs, but not on staging since we have only 1 calibration node available)
if [[ $CAN_DO_CALIB_TPC_IDC == 1 ]]; then
if [[ -z ${CALIB_TPC_IDC+x} ]]; then
if [[ $EPNSYNCMODE == 1 ]] && [[ "${GEN_TOPO_DEPLOYMENT_TYPE:-}" != "ALICE_STAGING" ]]; then
CALIB_TPC_IDC=1;
else
CALIB_TPC_IDC=0;
fi
fi
fi
# SAC (by default we enable it for running the synch. reco on the EPNs)
if [[ $CAN_DO_CALIB_TPC_SAC == 1 ]]; then
if [[ -z ${CALIB_TPC_SAC+x} ]]; then
if [[ $EPNSYNCMODE == 1 ]]; then
CALIB_TPC_SAC=1;
else
CALIB_TPC_SAC=0;
fi
fi
fi

( [[ -z ${CALIB_FT0_INTEGRATEDCURR:-} ]] || [[ $CAN_DO_CALIB_FT0_INTEGRATEDCURR == 0 ]] ) && CALIB_FT0_INTEGRATEDCURR=0
( [[ -z ${CALIB_FV0_INTEGRATEDCURR:-} ]] || [[ $CAN_DO_CALIB_FV0_INTEGRATEDCURR == 0 ]] ) && CALIB_FV0_INTEGRATEDCURR=0
( [[ -z ${CALIB_FDD_INTEGRATEDCURR:-} ]] || [[ $CAN_DO_CALIB_FDD_INTEGRATEDCURR == 0 ]] ) && CALIB_FDD_INTEGRATEDCURR=0
@@ -201,8 +210,8 @@ fi
( [[ -z ${CALIB_MFT_DEADMAP_TIME:-} ]] || [[ $CAN_DO_CALIB_MFT_DEADMAP_TIME == 0 ]] ) && CALIB_MFT_DEADMAP_TIME=0
# for async:
( [[ -z ${CALIB_EMC_ASYNC_RECALIB:-} ]] || [[ $CAN_DO_CALIB_EMC_ASYNC_RECALIB == 0 ]] ) && CALIB_EMC_ASYNC_RECALIB=0
( [[ -z ${CALIB_ASYNC_EXTRACTTPCCURRENTS:-} ]] || [[ $CAN_DO_CALIB_ASYNC_EXTRACTTPCCURRENTS == 0 ]] ) && CALIB_ASYNC_EXTRACTTPCCURRENTS=0
( [[ -z ${CALIB_ASYNC_DISABLE3DCURRENTS:-} ]] || [[ $CAN_DO_CALIB_ASYNC_DISABLE3DCURRENTS == 0 ]] ) && CALIB_ASYNC_DISABLE3DCURRENTS=0
( [[ -z ${CALIB_ASYNC_EXTRACTTPCCURRENTS:-} ]] || [[ $CAN_DO_CALIB_ASYNC_EXTRACTTPCCURRENTS == 0 ]] ) && CALIB_ASYNC_EXTRACTTPCCURRENTS=0
( [[ -z ${CALIB_ASYNC_DISABLE3DCURRENTS:-} ]] || [[ $CAN_DO_CALIB_ASYNC_DISABLE3DCURRENTS == 0 ]] ) && CALIB_ASYNC_DISABLE3DCURRENTS=0
: ${ON_SKIMMED_DATA:=0}
( [[ -z ${CALIB_ASYNC_EXTRACTTIMESERIES:-} ]] || [[ $CAN_DO_CALIB_ASYNC_EXTRACTTIMESERIES == 0 ]] ) && CALIB_ASYNC_EXTRACTTIMESERIES=0

@@ -241,7 +250,7 @@ fi
# define spec for proxy for TF-based outputs from BARREL
if [[ -z ${CALIBDATASPEC_BARREL_TF:-} ]]; then
# RCT updater
if [[ ${CALIB_RCT_UPDATER:-} == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "calibRCT:CTD/DONE/0"; fi
if [[ ${CALIB_RCT_UPDATER:-} == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "calibRCT:CTF/DONE/0"; fi
# prim vtx
if [[ $CALIB_PRIMVTX_MEANVTX == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "pvtx:GLO/PVTX/0"; fi

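For reference, the enabling logic in this file follows a two-step pattern: a CAN_DO_CALIB_* flag records whether a calibration is possible for the current workflow, and the matching CALIB_* switch is defaulted only if the user has not already set it. A minimal sketch of that pattern with a hypothetical calibration name CALIB_FOO_BAR (illustrative only, not taken from the diff):

# Default to enabled only when the prerequisite is met and the variable is unset
# (${CALIB_FOO_BAR+x} expands to empty only if the variable was never set).
if [[ $CAN_DO_CALIB_FOO_BAR == 1 ]]; then
    if [[ -z ${CALIB_FOO_BAR+x} ]]; then CALIB_FOO_BAR=1; fi
fi
# Later, force-disable when the switch is still empty/unset or the prerequisite is missing.
( [[ -z ${CALIB_FOO_BAR:-} ]] || [[ $CAN_DO_CALIB_FOO_BAR == 0 ]] ) && CALIB_FOO_BAR=0
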
36 changes: 36 additions & 0 deletions DATA/production/calib/emc-pedestal-aggregator.sh
@@ -0,0 +1,36 @@
#!/bin/bash

source common/setenv.sh

# ---------------------------------------------------------------------------------------------------------------------
# Set general arguments
source common/getCommonArgs.sh

INPTYPE=""
PROXY_INSPEC="A:EMC/PEDDATA/0;eos:***/INFORMATION"

CCDBPATH1="http://o2-ccdb.internal"
CCDBPATH2="$DCSCCDBSERVER"
if [[ $RUNTYPE == "SYNTHETIC" || "${GEN_TOPO_DEPLOYMENT_TYPE:-}" == "ALICE_STAGING" || ! -z $ISTEST ]]; then
CCDBPATH1="http://ccdb-test.cern.ch:8080"
CCDBPATH2="http://ccdb-test.cern.ch:8080"
fi

QC_CONFIG="/o2/components/qc/ANY/any/emc-pedestal-qc"

WORKFLOW=
add_W o2-dpl-raw-proxy "--proxy-name emc-pedestal-input-proxy --dataspec \"$PROXY_INSPEC\" --network-interface ib0 --channel-config \"name=emc-pedestal-input-proxy,method=bind,type=pull,rateLogging=1,transport=zeromq\"" "" 0
add_W o2-calibration-emcal-pedestal-calib-workflow
add_W o2-calibration-ccdb-populator-workflow "--ccdb-path=\"$CCDBPATH1\" --sspec-min 0 --sspec-max 0"
add_W o2-calibration-ccdb-populator-workflow "--ccdb-path=\"$CCDBPATH2\" --sspec-min 1 --sspec-max 1 --name-extention dcs"
add_QC_from_consul "${QC_CONFIG}" "--local --host localhost"
WORKFLOW+="o2-dpl-run $ARGS_ALL $GLOBALDPLOPT"

if [ $WORKFLOWMODE == "print" ]; then
echo Workflow command:
echo $WORKFLOW | sed "s/| */|\n/g"
else
# Execute the command we have assembled
WORKFLOW+=" --$WORKFLOWMODE ${WORKFLOWMODE_FILE}"
eval $WORKFLOW
fi
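
For context, the script above only assembles the WORKFLOW string and then either prints it or executes it, depending on WORKFLOWMODE. A hedged dry-run sketch, assuming the O2DPG environment sourced via common/setenv.sh and common/getCommonArgs.sh provides the remaining variables and that the command is issued from the DATA directory:

# Print the assembled workflow instead of running it; RUNTYPE=SYNTHETIC routes CCDB uploads to ccdb-test.
cd DATA
WORKFLOWMODE=print RUNTYPE=SYNTHETIC ./production/calib/emc-pedestal-aggregator.sh
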
28 changes: 28 additions & 0 deletions DATA/production/calib/emc-pedestal-processing.sh
@@ -0,0 +1,28 @@
#!/bin/bash

source common/setenv.sh

# ---------------------------------------------------------------------------------------------------------------------
# Set general arguments
source common/getCommonArgs.sh

PROXY_INSPEC="A:EMC/RAWDATA;dd:FLP/DISTSUBTIMEFRAME/0;eos:***/INFORMATION"

PROXY_OUTSPEC="downstream:EMC/PEDDATA/0"

[[ -z $NEMCPROCPIPELINES ]] && NEMCPROCPIPELINES=30

WORKFLOW=
add_W o2-dpl-raw-proxy "--dataspec \"$PROXY_INSPEC\" --inject-missing-data --channel-config \"name=readout-proxy,type=pull,method=connect,address=ipc://@$INRAWCHANNAME,transport=shmem,rateLogging=1\"" "" 0
add_W o2-calibration-emcal-pedestal-processor-workflow "--pipeline PedestalProcessor:${NEMCPROCPIPELINES}"
add_W o2-dpl-output-proxy "--dataspec \"$PROXY_OUTSPEC\" --proxy-channel-name emc-pedestal-input-proxy --channel-config \"name=emc-pedestal-input-proxy,method=connect,type=push,transport=zeromq,rateLogging=1\"" "" 0
WORKFLOW+="o2-dpl-run ${ARGS_ALL} ${GLOBALDPLOPT}"

if [ $WORKFLOWMODE == "print" ]; then
echo Workflow command:
echo $WORKFLOW | sed "s/| */|\n/g"
else
# Execute the command we have assembled
WORKFLOW+=" --$WORKFLOWMODE ${WORKFLOWMODE_FILE}"
eval $WORKFLOW
fi
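
The processing script exposes its pipeline count through NEMCPROCPIPELINES (defaulting to 30). A hedged example of overriding it for a dry run, under the same environment assumptions as above:

# Use 60 parallel PedestalProcessor pipelines and only print the resulting command.
cd DATA
NEMCPROCPIPELINES=60 WORKFLOWMODE=print ./production/calib/emc-pedestal-processing.sh
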
2 changes: 1 addition & 1 deletion DATA/production/calib/tpc-pedestal.sh
@@ -39,7 +39,7 @@ WORKFLOW=
add_W o2-dpl-raw-proxy "--dataspec \"$PROXY_INSPEC\" --inject-missing-data --channel-config \"name=readout-proxy,type=pull,method=connect,address=ipc://@tf-builder-pipe-0,transport=shmem,rateLogging=1\"" "" 0
add_W o2-tpc-calib-pad-raw "--input-spec \"$CALIB_INSPEC\" --publish-after-tfs ${publish_after} --max-events ${max_events} --lanes 36"
add_W o2-calibration-ccdb-populator-workflow "--ccdb-path \"http://o2-ccdb.internal\" " "" 0
add_QC_from_consul "${QC_CONFIG}" "--local --host lcoalhost"
add_QC_from_consul "${QC_CONFIG}" "--local --host localhost"

WORKFLOW+="o2-dpl-run ${ARGS_ALL} ${GLOBALDPLOPT}"

2 changes: 1 addition & 1 deletion DATA/production/calib/tpc-pulser-long.sh
@@ -38,7 +38,7 @@ WORKFLOW=
add_W o2-dpl-raw-proxy "--dataspec \"$PROXY_INSPEC\" --inject-missing-data --channel-config \"name=readout-proxy,type=pull,method=connect,address=ipc://@tf-builder-pipe-0,transport=shmem,rateLogging=1\"" "" 0
add_W o2-tpc-calib-pad-raw "--input-spec \"$CALIB_INSPEC\" --calib-type pulser --reset-after-publish --publish-after-tfs ${publish_after} --max-events ${max_events} --lanes 36 --check-calib-infos"
add_W o2-calibration-ccdb-populator-workflow "--ccdb-path \"http://o2-ccdb.internal\" " "" 0
add_QC_from_consul "${QC_CONFIG}" "--local --host lcoalhost"
add_QC_from_consul "${QC_CONFIG}" "--local --host localhost"

WORKFLOW+="o2-dpl-run ${ARGS_ALL} ${GLOBALDPLOPT}"

2 changes: 1 addition & 1 deletion DATA/production/calib/tpc-pulser.sh
@@ -40,7 +40,7 @@ WORKFLOW=
add_W o2-dpl-raw-proxy "--dataspec \"$PROXY_INSPEC\" --inject-missing-data --channel-config \"name=readout-proxy,type=pull,method=connect,address=ipc://@tf-builder-pipe-0,transport=shmem,rateLogging=1\"" "" 0
add_W o2-tpc-calib-pad-raw "--input-spec \"$CALIB_INSPEC\" --calib-type pulser --publish-after-tfs ${publish_after} --max-events ${max_events} --lanes 36 --check-calib-infos" "${CALIB_CONFIG}"
add_W o2-calibration-ccdb-populator-workflow "--ccdb-path \"http://o2-ccdb.internal\" " "" 0
add_QC_from_consul "${QC_CONFIG}" "--local --host lcoalhost"
add_QC_from_consul "${QC_CONFIG}" "--local --host localhost"

WORKFLOW+="o2-dpl-run ${ARGS_ALL} ${GLOBALDPLOPT}"

2 changes: 1 addition & 1 deletion DATA/production/qc-async/mch-tracks.json
@@ -33,7 +33,7 @@
"maxNumberCycles": "-1",
"dataSource": {
"type": "direct",
"query": "trackMCH:MCH/TRACKS;trackMCHROF:MCH/TRACKROFS;trackMCHTRACKCLUSTERS:MCH/TRACKCLUSTERS;mchtrackdigits:MCH/TRACKDIGITS"
"query": "trackMCH:MCH/TRACKS;trackMCHROF:MCH/TRACKROFS;trackMCHTRACKCLUSTERS:MCH/TRACKCLUSTERS;mchtrackdigits:MCH/CLUSTERDIGITS"
},
"taskParameters": {
"maxTracksPerTF": "600",
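
The corrected query keeps the usual DPL convention of semicolon-separated <binding>:<ORIGIN>/<DESCRIPTION>[/<subspec>] entries (the same convention as the CALIBDATASPEC strings above); the fix rebinds mchtrackdigits from MCH/TRACKDIGITS to MCH/CLUSTERDIGITS. A small bash illustration of the corrected string (illustrative only, not taken from the diff):

# Show one binding per line for readability.
QUERY="trackMCH:MCH/TRACKS;trackMCHROF:MCH/TRACKROFS;trackMCHTRACKCLUSTERS:MCH/TRACKCLUSTERS;mchtrackdigits:MCH/CLUSTERDIGITS"
echo "$QUERY" | tr ';' '\n'
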
2 changes: 2 additions & 0 deletions DATA/production/standalone-calibration.desc
@@ -1,3 +1,5 @@
EMC-pedestal-calibration: "O2PDPSuite" reco,1,1,"production/calib/emc-pedestal-processing.sh" calib,1,"production/calib/emc-pedestal-aggregator.sh"

FT0-time-offset-calibration: "O2PDPSuite" reco,5,5,"production/calib/ft0-timeoffset-processing.sh" calib,1,"production/calib/ft0-timeoffset-aggregator.sh"

ITS-noise-calibration: "O2PDPSuite" reco,20,20,"NITSDECTHREADS=4 NITSDECTPIPELINES=6 production/calib/its-noise-processing.sh" calib,20,"NTHREADSACC=4 NTHREADSNORM=16 NITSACCPIPELINES=16 production/calib/its-noise-aggregator.sh"
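
The new EMC-pedestal-calibration entry follows the same one-line descriptor format as the existing entries: a name, the software package, a reco stage with its counts and processing script, and a calib stage with its count and aggregator script (optionally prefixed by environment overrides, as in the ITS entry). A hedged sanity check that the two referenced scripts exist, run from the DATA directory:

# Verify that the scripts referenced by the new descriptor line are present.
for f in production/calib/emc-pedestal-processing.sh production/calib/emc-pedestal-aggregator.sh; do
    [[ -f "$f" ]] && echo "found $f" || echo "MISSING $f"
done
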
2 changes: 1 addition & 1 deletion DATA/testing/detectors/TPC/tpc-laser-raw-filter.sh
@@ -27,7 +27,7 @@ WORKFLOW=
add_W o2-dpl-raw-proxy "--dataspec \"$PROXY_INSPEC\" --inject-missing-data --channel-config \"name=readout-proxy,type=pull,method=connect,address=ipc://@tf-builder-pipe-0,transport=shmem,rateLogging=1\"" "" 0
add_W o2-tpc-raw-to-digits-workflow "--ignore-grp --input-spec \"$CALIB_INSPEC\" --remove-duplicates --pipeline tpc-raw-to-digits-0:20"
add_W o2-tpc-krypton-raw-filter "tpc-raw-to-digits-0:24 --lanes $NLANES --writer-type EPN --meta-output-dir $EPN2EOS_METAFILES_DIR --output-dir $CALIB_DIR --threshold-max 20 --max-tf-per-file 8000 --time-bins-before 20 --max-time-bins 650"
add_QC_from_consul "${QC_CONFIG_CONSUL}" "--local --host lcoalhost"
add_QC_from_consul "${QC_CONFIG_CONSUL}" "--local --host localhost"



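The four shell scripts above that call add_QC_from_consul (tpc-pedestal.sh, tpc-pulser-long.sh, tpc-pulser.sh and tpc-laser-raw-filter.sh) all receive the same one-character fix of the QC host argument (lcoalhost to localhost). A quick hedged check for any leftover occurrences of the typo, run from the repository root with a POSIX grep:

grep -rn "lcoalhost" DATA/ && echo "stale 'lcoalhost' entries remain" || echo "no stale 'lcoalhost' entries"
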
83 changes: 46 additions & 37 deletions GRID/utils/grid_submit.sh
@@ -230,6 +230,12 @@ export CONTROLSERVER
export PRODSPLIT
[[ $PRODSPLIT -gt 100 ]] && echo "Production split needs to be smaller than 100 for the moment" && exit 1

# check for presence of jq (needed in code path to fetch output files)
[[ "$FETCHOUTPUT" ]] && { which jq &> /dev/null || { echo "Could not find jq command. Please load or install" && exit 1; }; }

# check if script is actually a valid file and fail early if not
[[ "${SCRIPT}" ]] && [[ ! -f "${SCRIPT}" ]] && echo "Script file ${SCRIPT} does not exist .. aborting" && exit 1

# analyse options:
# we should either run with --script or with -c
[ "${SCRIPT}" ] && [ "$CONTINUE_WORKDIR" ] && echo "Script and continue mode not possible at same time" && exit 1
@@ -411,37 +417,46 @@
echo -ne "\b\b\b${spin[$((counter%4))]} ${JOBSTATUS}"
let counter=counter+1
if [ ! "${counter}" == "100" ]; then
# ensures that we see spinner ... but only check for new job
# status every 100 * 0.5 = 50s?
continue
fi
let counter=0
let counter=0 # reset counter
JOBSTATUS=$(alien.py ps -j ${MY_JOBID} | awk '//{print $4}')
# echo -ne "Waiting for jobs to return; Last status ${JOBSTATUS}"
if [ "$JOBSTATUS" == "D" ]; then

if [ "${JOBSTATUS}" == "D" ]; then
echo "Job done"
WAITFORALIEN=""
WAITFORALIEN="" # guarantees to go out of outer while loop

if [ "${FETCHOUTPUT}" ]; then
SUBJOBIDS=""
while [ ! ${SUBJOBIDS} ]; do
SUBJOBIDS=($(alien.py ps --trace ${MY_JOBID} | awk '/Subjob submitted/' | sed 's/.*submitted: //' | tr '\n' ' '))
sleep 1
done
# TODO: make this happen in a single alien.py session and with parallel copying
echo "Fetching results"
for splitcounter in `seq 1 ${PRODSPLIT}`; do
# we still need to check if this particular subjob was successful
SUBJOBSTATUS=$(alien.py ps -j ${SUBJOBIDS[splitcounter-1]} | awk '//{print $4}')
if [ "$SUBJOBSTATUS" == "D" ]; then
SPLITOUTDIR=$(printf "%03d" ${splitcounter})
[ ! -f ${SPLITOUTDIR} ] && mkdir ${SPLITOUTDIR}
echo "Fetching result files for subjob ${splitcounter} into ${PWD}"
CPCMD="alien.py cp ${MY_JOBWORKDIR}/${SPLITOUTDIR}/* file:./${SPLITOUTDIR}"
eval "${CPCMD}" 2> /dev/null
else
echo "Not fetching files for subjob ${splitcounter} since job code is ${SUBJOBSTATUS}"
fi
done
wait
SUBJOBIDS=()
SUBJOBSTATUSES=()
echo "Fetching subjob info"
while [ "${#SUBJOBIDS[@]}" == "0" ]; do
QUERYRESULT=$(ALIENPY_JSON=true alien.py ps -a -m ${MY_JOBID})
SUBJOBIDS=($(echo ${QUERYRESULT} | jq -r '.results[].id' | tr '\n' ' '))
SUBJOBSTATUSES=($(echo ${QUERYRESULT} | jq -r '.results[].status' | tr '\n' ' '))
# echo "LENGTH SUBJOBS ${#SUBJOBIDS[@]}"
sleep 1
done
# TODO: make this happen with parallel copying
echo "Fetching results for ${PRODSPLIT} sub-jobs"
for splitcounter in `seq 1 ${PRODSPLIT}`; do
let jobindex=splitcounter-1
THIS_STATUS=${SUBJOBSTATUSES[jobindex]}
THIS_JOB=${SUBJOBIDS[jobindex]}
echo "Fetching for job ${THIS_JOB}"
if [ "${THIS_STATUS}" == "DONE" ]; then
SPLITOUTDIR=$(printf "%03d" ${splitcounter})
[ ! -f ${SPLITOUTDIR} ] && mkdir ${SPLITOUTDIR}
echo "Fetching result files for subjob ${splitcounter} into ${PWD}"
CPCMD="alien.py cp ${MY_JOBWORKDIR}/${SPLITOUTDIR}/* file:./${SPLITOUTDIR}"
eval "${CPCMD}" 2> /dev/null
else
echo "Not fetching files for subjob ${splitcounter} since job code is ${THIS_STATUS}"
fi
done
fi
fi
if [[ "${FOO:0:1}" == [EK] ]]; then
@@ -541,13 +556,13 @@ if [ "${ONGRID}" = "1" ]; then
fi

# ----------- DOWNLOAD ADDITIONAL HELPERS ----------------------------
curl -o analyse_CPU.py https://raw.githubusercontent.com/sawenzel/AliceO2/swenzel/cpuana/Utilities/Tools/analyse_CPU.py &> /dev/null
chmod +x analyse_CPU.py
# curl -o analyse_CPU.py https://raw.githubusercontent.com/sawenzel/AliceO2/swenzel/cpuana/Utilities/Tools/analyse_CPU.py &> /dev/null
# chmod +x analyse_CPU.py
export PATH=$PATH:$PWD
export JOBUTILS_MONITORCPU=ON
export JOBUTILS_WRAPPER_SLEEP=5
#export JOBUTILS_JOB_KILLINACTIVE=180 # kill inactive jobs after 3 minutes --> will be the task of pipeline runner? (or make it optional)
export JOBUTILS_MONITORMEM=ON
# export JOBUTILS_MONITORCPU=ON
# export JOBUTILS_WRAPPER_SLEEP=5
# export JOBUTILS_JOB_KILLINACTIVE=180 # kill inactive jobs after 3 minutes --> will be the task of pipeline runner? (or make it optional)
# export JOBUTILS_MONITORMEM=ON

# ----------- EXECUTE ACTUAL JOB ------------------------------------
# source the actual job script from the work dir
@@ -558,13 +573,7 @@ chmod +x ./alien_jobscript.sh
cp alien_log_${ALIEN_PROC_ID:-0}.txt logtmp_${ALIEN_PROC_ID:-0}.txt
[ "${ALIEN_JOB_OUTPUTDIR}" ] && upload_to_Alien logtmp_${ALIEN_PROC_ID:-0}.txt ${ALIEN_JOB_OUTPUTDIR}/

# MOMENTARILY WE ZIP ALL LOG FILES
ziparchive=logs_PROCID${ALIEN_PROC_ID:-0}.zip
find ./ -name "*.log*" -exec zip ${ziparchive} {} ';'
find ./ -name "*mergerlog*" -exec zip ${ziparchive} {} ';'
find ./ -name "*serverlog*" -exec zip ${ziparchive} {} ';'
find ./ -name "*workerlog*" -exec zip ${ziparchive} {} ';'
find ./ -name "alien_log*.txt" -exec zip ${ziparchive} {} ';'
echo "Job done"

# We need to exit for the ALIEN JOB HANDLER!
exit 0
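
The reworked fetch logic above replaces the per-subjob trace parsing with a single JSON query of the master job via alien.py and jq. A condensed sketch of that query pattern, assuming alien.py and jq are on the PATH and MY_JOBID holds a valid master job id:

# Query all subjobs of the master job as JSON, then extract their ids and statuses.
QUERYRESULT=$(ALIENPY_JSON=true alien.py ps -a -m ${MY_JOBID})
SUBJOBIDS=($(echo ${QUERYRESULT} | jq -r '.results[].id'))
SUBJOBSTATUSES=($(echo ${QUERYRESULT} | jq -r '.results[].status'))
echo "master ${MY_JOBID}: ${#SUBJOBIDS[@]} subjobs, statuses: ${SUBJOBSTATUSES[*]}"
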
12 changes: 6 additions & 6 deletions MC/config/PWGLF/xsection/g4config_had_x2.in
@@ -64,10 +64,10 @@


# Scale hadronic cross section
/mcPhysics/setCrossSectionFactor deuteron hadElastic 2.
/mcPhysics/setCrossSectionFactor anti_deuteron hadElastic 2.
/mcPhysics/setCrossSectionFactor triton hadElastic 2.
/mcPhysics/setCrossSectionFactor anti_triton hadElastic 2.
/mcPhysics/setCrossSectionFactor he3 hadElastic 2.
/mcPhysics/setCrossSectionFactor anti_he3 hadElastic 2.
/mcPhysics/setCrossSectionFactor deuteron hadInElastic 2.
/mcPhysics/setCrossSectionFactor anti_deuteron hadInElastic 2.
/mcPhysics/setCrossSectionFactor triton hadInElastic 2.
/mcPhysics/setCrossSectionFactor anti_triton hadInElastic 2.
/mcPhysics/setCrossSectionFactor he3 hadInElastic 2.
/mcPhysics/setCrossSectionFactor anti_he3 hadInElastic 2.
