Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

plugin: add instance owner info to plugin #477

Merged
merged 3 commits into from
Aug 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 35 additions & 0 deletions src/cmd/flux-account-priority-update.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import sqlite3
import json
import subprocess
import pwd

import flux

Expand Down Expand Up @@ -133,6 +134,39 @@ def bulk_update(path):
cur.close()


def send_instance_owner_info():
    """Send the instance owner's accounting record to the priority plugin.

    Reads the instance owner's uid from the broker's ``security.owner``
    attribute, resolves it to a username via the password database, builds
    a permissive accounting record for the owner (effectively unlimited
    running/active jobs and nodes, access to all projects), and pushes it
    to the ``job-manager.mf_priority`` jobtap plugin via RPC.

    Raises whatever the RPC ``.get()`` raises if the plugin is not loaded
    or the update fails.
    """
    handle = flux.Flux()
    # uid of the instance owner, as recorded by the broker
    owner_uid = handle.attr_get("security.owner")
    try:
        # look up corresponding username of instance owner
        owner_username = pwd.getpwuid(int(owner_uid)).pw_name
    except KeyError:
        # can't find instance owner info; fall back to using the uid
        # string as the username (and therefore as the bank name below)
        owner_username = owner_uid

    # construct instance owner dictionary; the owner is given a personal
    # bank named after them and effectively-unlimited job/node limits
    instance_owner_data = {
        "userid": int(owner_uid),
        "bank": owner_username,
        "def_bank": owner_username,
        "fairshare": 0.5,
        "max_running_jobs": 1000000,
        "max_active_jobs": 1000000,
        "queues": "",
        "active": 1,
        "projects": "*",
        "def_project": "*",
        "max_nodes": 1000000,
    }

    # reuse the handle opened above rather than creating a second broker
    # connection just for this RPC
    handle.rpc(
        "job-manager.mf_priority.rec_update",
        json.dumps({"data": [instance_owner_data]}),
    ).get()


def main():
parser = argparse.ArgumentParser(
description="""
Expand All @@ -149,6 +183,7 @@ def main():
path = set_db_loc(args)

bulk_update(path)
send_instance_owner_info()


if __name__ == "__main__":
Expand Down
4 changes: 1 addition & 3 deletions t/Makefile.am
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ TESTSCRIPTS = \
t1036-hierarchy-small-no-tie-db.t \
t1037-hierarchy-small-tie-db.t \
t1038-hierarchy-small-tie-all-db.t \
t1039-issue476.t \
t5000-valgrind.t \
python/t1000-example.py \
python/t1001_db.py \
Expand Down Expand Up @@ -108,9 +109,6 @@ EXTRA_DIST= \
expected/pop_db/db_hierarchy_base.expected \
expected/pop_db/db_hierarchy_new_users.expected \
expected/job_usage/no_jobs.expected \
expected/plugin_state/internal_state_1.expected \
expected/plugin_state/internal_state_3.expected \
expected/plugin_state/internal_state_3.expected \
expected/sample_payloads/same_fairshare.json \
expected/sample_payloads/small_no_tie.json \
expected/sample_payloads/small_tie_all.json \
Expand Down
45 changes: 0 additions & 45 deletions t/expected/plugin_state/internal_state_1.expected

This file was deleted.

71 changes: 0 additions & 71 deletions t/expected/plugin_state/internal_state_3.expected

This file was deleted.

32 changes: 17 additions & 15 deletions t/t1019-mf-priority-info-fetch.t
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ test_description='Test getting internal state of plugin using flux jobtap query'
MULTI_FACTOR_PRIORITY=${FLUX_BUILD_DIR}/src/plugins/.libs/mf_priority.so
SUBMIT_AS=${SHARNESS_TEST_SRCDIR}/scripts/submit_as.py
DB_PATH=$(pwd)/FluxAccountingTest.db
EXPECTED_FILES=${SHARNESS_TEST_SRCDIR}/expected/plugin_state

export TEST_UNDER_FLUX_NO_JOB_EXEC=y
export TEST_UNDER_FLUX_SCHED_SIMPLE_MODE="limited=1"
Expand Down Expand Up @@ -62,8 +61,8 @@ test_expect_success 'add some projects to the DB' '
'

test_expect_success 'add a user with two different banks to the DB' '
flux account add-user --username=user1001 --userid=1001 --bank=account1 --max-running-jobs=2 &&
flux account add-user --username=user1001 --userid=1001 --bank=account2
flux account add-user --username=user5001 --userid=5001 --bank=account1 --max-running-jobs=2 &&
flux account add-user --username=user5001 --userid=5001 --bank=account2
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is there something in the testsuite that ensures the user running it is not uid 5001 (or, for that matter, 1001)? Or is the testsuite only expected to be run in certain environments?

One idea would be to add a check for the current uid in sharness.d/flux-accounting.sh and set up some environment variables with userids instead of explicitly using 1001 and 5001 (in case those conflict with system uids)

This could be done in a future cleanup PR if this is working for now.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I guess there isn't anything explicit that is checking it (I just checked the logs from the tests that failed via GitHub actions), but that's a good idea to set up some environment variables so that I don't have to define them every single time I create a new test. I'll open an issue on this so I can get this cleaned up in the future.

'

test_expect_success 'send flux-accounting DB information to the plugin' '
Expand All @@ -72,22 +71,23 @@ test_expect_success 'send flux-accounting DB information to the plugin' '

test_expect_success HAVE_JQ 'fetch plugin state' '
flux jobtap query mf_priority.so > query_1.json &&
jq ".mf_priority_map" query_1.json > internal_state_1.test &&
test_cmp ${EXPECTED_FILES}/internal_state_1.expected internal_state_1.test
test_debug "jq -S . <query_1.json" &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].bank_name == \"account1\"" <query_1.json &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[1].bank_name == \"account2\"" <query_1.json
'

test_expect_success 'submit max number of jobs under default bank (1 held job due to max_run_jobs limit)' '
jobid1=$(flux python ${SUBMIT_AS} 1001 sleep 60) &&
jobid2=$(flux python ${SUBMIT_AS} 1001 sleep 60) &&
jobid3=$(flux python ${SUBMIT_AS} 1001 sleep 60)
jobid1=$(flux python ${SUBMIT_AS} 5001 sleep 60) &&
jobid2=$(flux python ${SUBMIT_AS} 5001 sleep 60) &&
jobid3=$(flux python ${SUBMIT_AS} 5001 sleep 60)
'

test_expect_success HAVE_JQ 'fetch plugin state and make sure that jobs are reflected in JSON object' '
flux jobtap query mf_priority.so > query_2.json &&
test_debug "jq -S . <query_2.json" &&
jq -e ".mf_priority_map[0].banks[0].held_jobs | length == 1" <query_2.json &&
jq -e ".mf_priority_map[0].banks[0].cur_run_jobs == 2" <query_2.json &&
jq -e ".mf_priority_map[0].banks[0].cur_active_jobs == 3" <query_2.json
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].held_jobs | length == 1" <query_2.json &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_run_jobs == 2" <query_2.json &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_active_jobs == 3" <query_2.json
'

test_expect_success 'cancel jobs in reverse order so last job does not get alloc event' '
Expand All @@ -98,8 +98,8 @@ test_expect_success 'cancel jobs in reverse order so last job does not get alloc

test_expect_success 'add another user to flux-accounting DB and send it to plugin' '
flux account add-user \
--username=user1002 \
--userid=1002 \
--username=user5002 \
--userid=5002 \
--bank=account3 \
--queues="bronze" \
--projects="A,B" \
Expand All @@ -109,8 +109,10 @@ test_expect_success 'add another user to flux-accounting DB and send it to plugi

test_expect_success HAVE_JQ 'fetch plugin state again with multiple users' '
flux jobtap query mf_priority.so > query_3.json &&
jq ".mf_priority_map" query_3.json > internal_state_3.test &&
test_cmp ${EXPECTED_FILES}/internal_state_3.expected internal_state_3.test
test_debug "jq -S . <query_3.json" &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].bank_name == \"account1\"" <query_3.json &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[1].bank_name == \"account2\"" <query_3.json &&
jq -e ".mf_priority_map[] | select(.userid == 5002) | .banks[0].bank_name == \"account3\"" <query_3.json
'

test_expect_success 'shut down flux-accounting service' '
Expand Down
30 changes: 15 additions & 15 deletions t/t1020-mf-priority-issue262.t
Original file line number Diff line number Diff line change
Expand Up @@ -44,16 +44,16 @@ test_expect_success 'add some banks to the DB' '
'

test_expect_success 'add a user with two different banks to the DB' '
flux account add-user --username=user1001 --userid=1001 --bank=account1 --max-running-jobs=5 --max-active-jobs=10 &&
flux account add-user --username=user1001 --userid=1001 --bank=account2
flux account add-user --username=user5001 --userid=5001 --bank=account1 --max-running-jobs=5 --max-active-jobs=10 &&
flux account add-user --username=user5001 --userid=5001 --bank=account2
'

test_expect_success 'send flux-accounting DB information to the plugin' '
flux account-priority-update -p $(pwd)/FluxAccountingTest.db
'

test_expect_success 'submit a sleep 180 job and ensure it is running' '
jobid1=$(flux python ${SUBMIT_AS} 1001 sleep 180) &&
jobid1=$(flux python ${SUBMIT_AS} 5001 sleep 180) &&
flux job wait-event -vt 60 ${jobid1} alloc
'

Expand All @@ -62,25 +62,25 @@ test_expect_success 'stop scheduler from allocating resources to jobs' '
'

test_expect_success 'submit 2 more sleep 180 jobs; ensure both are in SCHED state' '
jobid2=$(flux python ${SUBMIT_AS} 1001 sleep 180) &&
jobid3=$(flux python ${SUBMIT_AS} 1001 sleep 180) &&
jobid2=$(flux python ${SUBMIT_AS} 5001 sleep 180) &&
jobid3=$(flux python ${SUBMIT_AS} 5001 sleep 180) &&
flux job wait-event -vt 60 ${jobid2} priority &&
flux job wait-event -vt 60 ${jobid3} priority
'

test_expect_success 'ensure current running and active jobs are correct: 1 running, 3 active' '
flux jobtap query mf_priority.so > query_1.json &&
test_debug "jq -S . <query_1.json" &&
jq -e ".mf_priority_map[0].banks[0].cur_run_jobs == 1" <query_1.json &&
jq -e ".mf_priority_map[0].banks[0].cur_active_jobs == 3" <query_1.json
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_run_jobs == 1" <query_1.json &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_active_jobs == 3" <query_1.json
'

test_expect_success 'update the plugin and ensure current running and active jobs are correct' '
flux account-priority-update -p $(pwd)/FluxAccountingTest.db &&
flux jobtap query mf_priority.so > query_2.json &&
test_debug "jq -S . <query_2.json" &&
jq -e ".mf_priority_map[0].banks[0].cur_run_jobs == 1" <query_2.json &&
jq -e ".mf_priority_map[0].banks[0].cur_active_jobs == 3" <query_2.json
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_run_jobs == 1" <query_2.json &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_active_jobs == 3" <query_2.json
'

test_expect_success 'change the priority of one of the jobs' '
Expand All @@ -92,25 +92,25 @@ test_expect_success 'change the priority of one of the jobs' '
test_expect_success 'ensure job counts are still the same: 1 running, 3 active' '
flux jobtap query mf_priority.so > query_3.json &&
test_debug "jq -S . <query_3.json" &&
jq -e ".mf_priority_map[0].banks[0].cur_run_jobs == 1" <query_3.json &&
jq -e ".mf_priority_map[0].banks[0].cur_active_jobs == 3" <query_3.json
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_run_jobs == 1" <query_3.json &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_active_jobs == 3" <query_3.json
'

test_expect_success 'cancel one of the scheduled jobs, check job counts are correct: 1 running, 2 active' '
flux cancel ${jobid2} &&
flux jobtap query mf_priority.so > query_4.json &&
test_debug "jq -S . <query_4.json" &&
jq -e ".mf_priority_map[0].banks[0].cur_run_jobs == 1" <query_4.json &&
jq -e ".mf_priority_map[0].banks[0].cur_active_jobs == 2" <query_4.json
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_run_jobs == 1" <query_4.json &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_active_jobs == 2" <query_4.json
'

test_expect_success 'cancel sleep 180 job(s), check job counts: 0 running, 0 active' '
flux cancel ${jobid1} &&
flux cancel ${jobid3} &&
flux jobtap query mf_priority.so > query_5.json &&
test_debug "jq -S . <query_5.json" &&
jq -e ".mf_priority_map[0].banks[0].cur_run_jobs == 0" <query_5.json &&
jq -e ".mf_priority_map[0].banks[0].cur_active_jobs == 0" <query_5.json
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_run_jobs == 0" <query_5.json &&
jq -e ".mf_priority_map[] | select(.userid == 5001) | .banks[0].cur_active_jobs == 0" <query_5.json
'

test_done
Loading
Loading