Skip to content

Commit

Permalink
feat(config): set maximum limit for number of Dask workers (#712)
Browse files Browse the repository at this point in the history
  • Loading branch information
Alputer committed Nov 11, 2024
1 parent 95ea2df commit d36b59b
Show file tree
Hide file tree
Showing 4 changed files with 44 additions and 0 deletions.
15 changes: 15 additions & 0 deletions docs/openapi.json
Original file line number Diff line number Diff line change
Expand Up @@ -445,6 +445,10 @@
"title": "The maximum memory limit for Dask clusters created by users",
"value": "16Gi"
},
"dask_cluster_max_number_of_workers": {
"title": "The maximum number of workers that users can ask for the single Dask cluster",
"value": "20"
},
"dask_cluster_max_single_worker_memory": {
"title": "The maximum amount of memory that users can ask for the single Dask worker",
"value": "8Gi"
Expand Down Expand Up @@ -547,6 +551,17 @@
},
"type": "object"
},
"dask_cluster_max_number_of_workers": {
"properties": {
"title": {
"type": "string"
},
"value": {
"type": "string"
}
},
"type": "object"
},
"dask_cluster_max_single_worker_memory": {
"properties": {
"title": {
Expand Down
5 changes: 5 additions & 0 deletions reana_server/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,11 @@
)
"""Number of workers in Dask cluster by default."""

# Hard ceiling on the number of workers a user may request for one Dask
# cluster; requests above this are rejected at validation time.
# NOTE: the env default is given as the string "20" (not the int 20) so it is
# consistent with the other env-var defaults in this module (e.g. "2Gi");
# int() yields the same value either way.
REANA_DASK_CLUSTER_MAX_NUMBER_OF_WORKERS: int = int(
    os.getenv("REANA_DASK_CLUSTER_MAX_NUMBER_OF_WORKERS", "20")
)
"""Maximum number of workers in Dask cluster."""

# Memory granted to a single Dask worker by default; a Kubernetes-style
# quantity string (it is later parsed with kubernetes_memory_to_bytes).
# Overridable via the environment variable of the same name.
REANA_DASK_CLUSTER_DEFAULT_SINGLE_WORKER_MEMORY: str = os.getenv(
    "REANA_DASK_CLUSTER_DEFAULT_SINGLE_WORKER_MEMORY", "2Gi"
)
Expand Down
17 changes: 17 additions & 0 deletions reana_server/rest/info.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
REANA_DASK_CLUSTER_MAX_MEMORY_LIMIT,
REANA_DASK_CLUSTER_DEFAULT_SINGLE_WORKER_MEMORY,
REANA_DASK_CLUSTER_MAX_SINGLE_WORKER_MEMORY,
REANA_DASK_CLUSTER_MAX_NUMBER_OF_WORKERS,
)
from reana_server.decorators import signin_required

Expand Down Expand Up @@ -173,6 +174,13 @@ def info(user, **kwargs): # noqa
value:
type: string
type: object
dask_cluster_max_number_of_workers:
properties:
title:
type: string
value:
type: string
type: object
type: object
examples:
application/json:
Expand Down Expand Up @@ -237,6 +245,10 @@ def info(user, **kwargs): # noqa
"title": "The maximum amount of memory that users can ask for the single Dask worker",
"value": "8Gi"
},
"dask_cluster_max_number_of_workers": {
"title": "The maximum number of workers that users can ask for the single Dask cluster",
"value": "20"
},
}
500:
description: >-
Expand Down Expand Up @@ -315,6 +327,10 @@ def info(user, **kwargs): # noqa
title="The maximum amount of memory that users can ask for the single Dask worker",
value=REANA_DASK_CLUSTER_MAX_SINGLE_WORKER_MEMORY,
)
        cluster_information["dask_cluster_max_number_of_workers"] = dict(
            title="The maximum number of workers that users can ask for the single Dask cluster",
            value=REANA_DASK_CLUSTER_MAX_NUMBER_OF_WORKERS,
        )

return InfoSchema().dump(cluster_information)

Expand Down Expand Up @@ -366,3 +382,4 @@ class InfoSchema(Schema):
dask_cluster_max_memory_limit = fields.Nested(StringInfoValue)
dask_cluster_default_single_worker_memory = fields.Nested(StringInfoValue)
dask_cluster_max_single_worker_memory = fields.Nested(StringInfoValue)
dask_cluster_max_number_of_workers = fields.Nested(StringInfoValue)
7 changes: 7 additions & 0 deletions reana_server/validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
DASK_ENABLED,
REANA_DASK_CLUSTER_MAX_MEMORY_LIMIT,
REANA_DASK_CLUSTER_DEFAULT_NUMBER_OF_WORKERS,
REANA_DASK_CLUSTER_MAX_NUMBER_OF_WORKERS,
REANA_DASK_CLUSTER_DEFAULT_SINGLE_WORKER_MEMORY,
REANA_DASK_CLUSTER_MAX_SINGLE_WORKER_MEMORY,
)
Expand Down Expand Up @@ -188,6 +189,12 @@ def validate_dask_memory_and_cores_limits(reana_yaml: Dict) -> None:
"number_of_workers", REANA_DASK_CLUSTER_DEFAULT_NUMBER_OF_WORKERS
)
)

    if number_of_workers > REANA_DASK_CLUSTER_MAX_NUMBER_OF_WORKERS:
        raise REANAValidationError(
            f"The number of requested Dask workers ({number_of_workers}) exceeds the maximum limit ({REANA_DASK_CLUSTER_MAX_NUMBER_OF_WORKERS})."
        )

requested_dask_cluster_memory = (
kubernetes_memory_to_bytes(single_worker_memory) * number_of_workers
)
Expand Down

0 comments on commit d36b59b

Please sign in to comment.