t: skip t1011 if job-archive module not detected, add new tests for fetch-job-records #518

Merged: 1 commit merged on Oct 17, 2024
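The first change named in the title (skipping t1011 when the job-archive module is not detected) is not visible in the hunks shown below for this file, which instead drop the test's job-archive dependency. For reference, a guard of that kind is usually written in sharness roughly as follows; the module check via flux module list is an assumption for illustration, not code taken from this PR:

    # hypothetical sketch, not from this PR: skip the whole file when the
    # optional job-archive module is not loaded in the test instance
    if ! flux module list | grep -q job-archive; then
        skip_all='skipping t1011: job-archive module not detected'
        test_done
    fi

Placed near the top of the script after sourcing sharness.sh, a guard like this marks every test in the file as skipped instead of failing on instances without job-archive.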
76 changes: 15 additions & 61 deletions t/t1011-job-archive-interface.t
@@ -5,8 +5,6 @@ test_description='test fetching jobs and updating the fair share values for a gr
. $(dirname $0)/sharness.sh

DB_PATH=$(pwd)/FluxAccountingTest.db
ARCHIVEDIR=`pwd`
ARCHIVEDB="${ARCHIVEDIR}/jobarchive.db"
QUERYCMD="flux python ${SHARNESS_TEST_SRCDIR}/scripts/query.py"
NO_JOBS=${SHARNESS_TEST_SRCDIR}/expected/job_usage/no_jobs.expected

@@ -15,27 +13,6 @@ test_under_flux 4 job

flux setattr log-stderr-level 1

# wait for job to be stored in job archive
# arg1 - jobid
# arg2 - database path
wait_db() {
local jobid=$(flux job id $1)
local dbpath=$2
local i=0
query="select id from jobs;"
while ! ${QUERYCMD} -t 100 ${dbpath} "${query}" | grep $jobid > /dev/null \
&& [ $i -lt 50 ]
do
sleep 0.1
i=$((i + 1))
done
if [ "$i" -eq "100" ]
then
return 1
fi
return 0
}

# select job records from flux-accounting DB
select_job_records() {
local dbpath=$1
@@ -70,29 +47,14 @@ test_expect_success 'add some users to the DB' '
flux account add-user --username=user5012 --userid=5012 --bank=account1 --shares=1
'

test_expect_success 'job-archive: set up config file' '
cat >archive.toml <<EOF &&
[archive]
dbpath = "${ARCHIVEDB}"
period = "0.5s"
busytimeout = "0.1s"
EOF
flux config reload
'

test_expect_success 'load job-archive module' '
flux module load job-archive
'

test_expect_success 'submit a job that does not run' '
job=$(flux submit --urgency=0 sleep 60) &&
flux job wait-event -vt 10 ${job} priority &&
flux cancel ${job} &&
wait_db ${job} ${ARCHIVEDB}
flux cancel ${job}
'

test_expect_success 'run scripts to update job usage and fair-share' '
flux account-fetch-job-records --copy ${ARCHIVEDB} -p ${DB_PATH} &&
flux account-fetch-job-records -p ${DB_PATH} &&
flux account -p ${DB_PATH} update-usage &&
flux account-update-fshare -p ${DB_PATH}
'
@@ -108,19 +70,19 @@ test_expect_success 'check that no jobs show up under user' '
test_cmp ${NO_JOBS} no_jobs.test
'

test_expect_success 'submit some jobs so they populate flux-core job-archive' '
test_expect_success 'submit some jobs and wait for them to finish running' '
jobid1=$(flux submit -N 1 hostname) &&
jobid2=$(flux submit -N 1 hostname) &&
jobid3=$(flux submit -N 2 hostname) &&
jobid4=$(flux submit -N 1 hostname) &&
wait_db ${jobid1} ${ARCHIVEDB} &&
wait_db ${jobid2} ${ARCHIVEDB} &&
wait_db ${jobid3} ${ARCHIVEDB} &&
wait_db ${jobid4} ${ARCHIVEDB}
flux job wait-event -vt 3 ${jobid1} clean &&
flux job wait-event -vt 3 ${jobid2} clean &&
flux job wait-event -vt 3 ${jobid3} clean &&
flux job wait-event -vt 3 ${jobid4} clean
'

test_expect_success 'call --copy argument to populate jobs table from job-archive DB' '
flux account-fetch-job-records --copy ${ARCHIVEDB} -p ${DB_PATH} &&
test_expect_success 'run fetch-job-records; ensure jobs show up in jobs table' '
flux account-fetch-job-records -p ${DB_PATH} &&
select_job_records ${DB_PATH} > records.out &&
grep "hostname" records.out
'
@@ -129,19 +91,15 @@ test_expect_success 'submit some sleep 1 jobs under one user' '
jobid1=$(flux submit -N 1 sleep 1) &&
jobid2=$(flux submit -N 1 sleep 1) &&
jobid3=$(flux submit -n 2 -N 2 sleep 1) &&
wait_db ${jobid1} ${ARCHIVEDB} &&
wait_db ${jobid2} ${ARCHIVEDB} &&
wait_db ${jobid3} ${ARCHIVEDB}
flux job wait-event -vt 3 ${jobid1} clean &&
flux job wait-event -vt 3 ${jobid2} clean &&
flux job wait-event -vt 3 ${jobid3} clean
'

test_expect_success 'run fetch-job-records script' '
flux account-fetch-job-records -p ${DB_PATH}
'

test_expect_success 'view job records for a user' '
flux account -p ${DB_PATH} view-job-records --user $username
'

test_expect_success 'view job records for a user and direct it to a file' '
flux account -p ${DB_PATH} --output-file $(pwd)/test.txt view-job-records --user $username
'
@@ -160,9 +118,9 @@ test_expect_success 'submit some sleep 1 jobs under the secondary bank of the sa
jobid1=$(flux submit --setattr=system.bank=account2 -N 1 sleep 1) &&
jobid2=$(flux submit --setattr=system.bank=account2 -N 1 sleep 1) &&
jobid3=$(flux submit --setattr=system.bank=account2 -n 2 -N 2 sleep 1) &&
wait_db ${jobid1} ${ARCHIVEDB} &&
wait_db ${jobid2} ${ARCHIVEDB} &&
wait_db ${jobid3} ${ARCHIVEDB}
flux job wait-event -vt 3 ${jobid1} clean &&
flux job wait-event -vt 3 ${jobid2} clean &&
flux job wait-event -vt 3 ${jobid3} clean
'

test_expect_success 'run custom job-list script' '
@@ -195,10 +153,6 @@ test_expect_success 'remove flux-accounting DB' '
rm $(pwd)/FluxAccountingTest.db
'

test_expect_success 'job-archive: unload module' '
flux module unload job-archive
'

test_expect_success 'shut down flux-accounting service' '
flux python -c "import flux; flux.Flux().rpc(\"accounting.shutdown_service\").get()"
'
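With the job-archive configuration and module load/unload removed, the file needs no extra broker setup; a usage sketch for exercising it standalone (standard sharness options, assuming the repository's usual t/ layout):

    # run the single test file verbosely from the t/ directory
    cd t && ./t1011-job-archive-interface.t -v
    # or run the whole suite through the build system
    make check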