Merge pull request #207 from cmoussa1/add.queue.priority
plugin: add queue priority to priority calculation, plugin
mergify[bot] authored Apr 14, 2022
2 parents d659500 + 76a897e commit 9c2ad6c
Showing 11 changed files with 621 additions and 49 deletions.
25 changes: 24 additions & 1 deletion src/cmd/flux-account-priority-update.py
@@ -50,11 +50,13 @@ def bulk_update(path):

     data = {}
     bulk_user_data = []
+    bulk_q_data = []
 
     # fetch all rows from association_table (will print out tuples)
     for row in cur.execute(
         """SELECT userid, bank, default_bank,
-        fairshare, max_running_jobs, max_active_jobs FROM association_table"""
+        fairshare, max_running_jobs, max_active_jobs,
+        queues FROM association_table"""
     ):
         # create a JSON payload with the results of the query
         single_user_data = {
@@ -64,14 +66,35 @@ def bulk_update(path):
"fairshare": float(row[3]),
"max_running_jobs": int(row[4]),
"max_active_jobs": int(row[5]),
"queues": str(row[6]),
}
bulk_user_data.append(single_user_data)

data = {"data": bulk_user_data}

flux.Flux().rpc("job-manager.mf_priority.rec_update", json.dumps(data)).get()

# fetch all rows from queue_table
for row in cur.execute("SELECT * FROM queue_table"):
# create a JSON payload with the results of the query
single_q_data = {
"queue": str(row[0]),
"min_nodes_per_job": int(row[1]),
"max_nodes_per_job": int(row[2]),
"max_time_per_job": int(row[3]),
"priority": int(row[4]),
}
bulk_q_data.append(single_q_data)

data = {"data": bulk_q_data}

flux.Flux().rpc("job-manager.mf_priority.rec_q_update", json.dumps(data)).get()

flux.Flux().rpc("job-manager.mf_priority.reprioritize")

# close DB connection
cur.close()


def main():
parser = argparse.ArgumentParser(
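For reference, the payload that the new queue_table loop assembles and sends to the job-manager.mf_priority.rec_q_update RPC has the shape sketched below. Only the keys and the RPC topic come from the diff above; the queue name and numeric values are made-up placeholders, not data from this commit.

    # minimal sketch of the rec_q_update payload built by bulk_update();
    # the queue name and numbers below are hypothetical examples
    data = {
        "data": [
            {
                "queue": "batch",            # hypothetical queue name
                "min_nodes_per_job": 1,      # hypothetical per-queue limits
                "max_nodes_per_job": 2048,
                "max_time_per_job": 64000,
                "priority": 200,             # per-queue priority added by this change
            }
        ]
    }
    # sent with: flux.Flux().rpc("job-manager.mf_priority.rec_q_update", json.dumps(data)).get()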
