From 91d526165e0872ed9938eeaaedcc33242a5f0993 Mon Sep 17 00:00:00 2001 From: Siddardh Ramesh <30310381+siddardh-ra@users.noreply.github.com> Date: Wed, 4 May 2022 18:42:25 +0530 Subject: [PATCH] - Move abort() call to top-level API handling (#2730) PBENCH-202 Move all abort() calls to top-level API handling in ApiBase._dispatch() to be sure that Flask's abort exception isn't inadvertently converted to an INTERNAL_SERVER_ERROR via an except Exception clause. --- lib/pbench/server/api/resources/__init__.py | 31 +++++++++++ .../server/api/resources/datasets_list.py | 4 +- .../server/api/resources/datasets_metadata.py | 20 +++---- .../api/resources/query_apis/__init__.py | 55 ++++++++----------- .../resources/query_apis/datasets/__init__.py | 29 +++++----- .../query_apis/datasets/datasets_contents.py | 9 --- .../query_apis/datasets/datasets_mappings.py | 10 ++-- .../query_apis/datasets/namespace_and_rows.py | 19 +------ .../resources/query_apis/datasets_detail.py | 10 ++-- .../query_apis/test_datasets_contents.py | 15 +++++ .../server/query_apis/test_datasets_delete.py | 4 +- .../query_apis/test_datasets_mappings.py | 2 +- .../query_apis/test_datasets_publish.py | 4 +- .../query_apis/test_namespace_and_rows.py | 6 +- 14 files changed, 112 insertions(+), 106 deletions(-) diff --git a/lib/pbench/server/api/resources/__init__.py b/lib/pbench/server/api/resources/__init__.py index 1754c9626f..36c78c9631 100644 --- a/lib/pbench/server/api/resources/__init__.py +++ b/lib/pbench/server/api/resources/__init__.py @@ -221,6 +221,19 @@ def __str__(self): return f"Unrecognized list value{'s' if len(self.bad) > 1 else ''} {self.bad!r} given for parameter {self.parameter.name}; expected {expected}" +class APIAbort(Exception): + """ + Used to report an error and abort if there is a failure in processing of API request. 
+ """ + + def __init__(self, http_status: int, message: str = None): + self.http_status = http_status + self.message = message if message else HTTPStatus(http_status).phrase + + def __str__(self) -> str: + return f"API error {self.http_status} : {self.message}" + + def convert_date(value: str, _) -> datetime: """ Convert a date/time string to a datetime.datetime object. @@ -1041,6 +1054,15 @@ def _dispatch( except SchemaError as e: self.logger.exception("{}: SchemaError {}", api_name, e) abort(e.http_status, message=str(e)) + except APIAbort as e: + self.logger.exception("{} {}", self.__class__.__name__, e) + abort(e.http_status, message=e.message) + except Exception as e: + self.logger.exception("{} API error: {}", self.__class__.__name__, e) + abort( + HTTPStatus.INTERNAL_SERVER_ERROR, + message=HTTPStatus.INTERNAL_SERVER_ERROR.phrase, + ) try: json_data = request.get_json() @@ -1095,6 +1117,15 @@ def _dispatch( except SchemaError as e: self.logger.exception("{}: SchemaError {}", api_name, e) abort(e.http_status, message=str(e)) + except APIAbort as e: + self.logger.exception("{} {}", self.__class__.__name__, e) + abort(e.http_status, message=e.message) + except Exception as e: + self.logger.exception("{} API error: {}", self.__class__.__name__, e) + abort( + HTTPStatus.INTERNAL_SERVER_ERROR, + message=HTTPStatus.INTERNAL_SERVER_ERROR.phrase, + ) def _get(self, json_data: JSON, request: Request) -> Response: """ diff --git a/lib/pbench/server/api/resources/datasets_list.py b/lib/pbench/server/api/resources/datasets_list.py index 24c14223c8..117a204e61 100644 --- a/lib/pbench/server/api/resources/datasets_list.py +++ b/lib/pbench/server/api/resources/datasets_list.py @@ -5,10 +5,10 @@ from pbench.server import PbenchServerConfig, JSON from pbench.server.api.resources import ( - ApiBase, API_OPERATION, - Parameter, + ApiBase, ParamType, + Parameter, Schema, ) from pbench.server.database.database import Database diff --git 
a/lib/pbench/server/api/resources/datasets_metadata.py b/lib/pbench/server/api/resources/datasets_metadata.py index 91894ccbe4..abda414844 100644 --- a/lib/pbench/server/api/resources/datasets_metadata.py +++ b/lib/pbench/server/api/resources/datasets_metadata.py @@ -3,14 +3,14 @@ from flask.json import jsonify from flask.wrappers import Request, Response -from flask_restful import abort from pbench.server import JSONOBJECT, PbenchServerConfig from pbench.server.api.resources import ( - ApiBase, + APIAbort, API_OPERATION, - Parameter, + ApiBase, ParamType, + Parameter, Schema, ) from pbench.server.database.models.datasets import ( @@ -77,9 +77,9 @@ def _get(self, _, request: Request) -> Response: try: metadata = self._get_metadata(name, keys) except DatasetNotFound: - abort(HTTPStatus.BAD_REQUEST, message=f"Dataset {name} not found") + raise APIAbort(HTTPStatus.BAD_REQUEST, f"Dataset {name} not found") except MetadataError as e: - abort(HTTPStatus.BAD_REQUEST, message=str(e)) + raise APIAbort(HTTPStatus.BAD_REQUEST, str(e)) return jsonify(metadata) @@ -120,9 +120,8 @@ def _put(self, json_data: JSONOBJECT, _) -> Response: dataset = Dataset.query(name=name) except DatasetError as e: self.logger.warning("Dataset {} not found: {}", name, str(e)) - abort( - HTTPStatus.BAD_REQUEST, - message=f"Dataset {json_data['name']} not found", + raise APIAbort( + HTTPStatus.BAD_REQUEST, f"Dataset {json_data['name']} not found" ) failures = [] @@ -133,9 +132,6 @@ def _put(self, json_data: JSONOBJECT, _) -> Response: self.logger.warning("Unable to update key {} = {!r}: {}", k, v, str(e)) failures.append(k) if failures: - abort( - HTTPStatus.INTERNAL_SERVER_ERROR, - message=f"Unable to update metadata keys {','.join(failures)}", - ) + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) results = self._get_metadata(name, list(metadata.keys())) return jsonify(results) diff --git a/lib/pbench/server/api/resources/query_apis/__init__.py 
b/lib/pbench/server/api/resources/query_apis/__init__.py index 1a66de3618..53f61f9bf5 100644 --- a/lib/pbench/server/api/resources/query_apis/__init__.py +++ b/lib/pbench/server/api/resources/query_apis/__init__.py @@ -11,12 +11,12 @@ from dateutil.relativedelta import relativedelta from elasticsearch import Elasticsearch, helpers, VERSION from flask.wrappers import Response -from flask_restful import abort import requests from pbench.server import PbenchServerConfig, JSON from pbench.server.api.auth import Auth from pbench.server.api.resources import ( + APIAbort, API_OPERATION, ApiBase, Schema, @@ -389,14 +389,10 @@ def _call(self, method: Callable, json_data: JSON): return "", HTTPStatus.NO_CONTENT except UnauthorizedAccess as e: self.logger.warning("{}", e) - abort(e.http_status, message=str(e)) + raise APIAbort(e.http_status, str(e)) except KeyError as e: self.logger.exception("{} problem in preprocess, missing {}", klasname, e) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") - except Exception as e: - self.logger.exception("{} preprocess failed: {}", klasname, e) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") - + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) try: # prepare payload for Elasticsearch query es_request = self.assemble(json_data, context) @@ -409,7 +405,7 @@ def _call(self, method: Callable, json_data: JSON): ) except Exception as e: self.logger.exception("{} assembly failed: {}", klasname, e) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) try: # perform the Elasticsearch query @@ -428,38 +424,37 @@ def _call(self, method: Callable, json_data: JSON): e, es_request, ) - abort( + raise APIAbort( HTTPStatus.BAD_GATEWAY, - message=f"Elasticsearch query failure {e.response.reason} ({e.response.status_code})", + f"Elasticsearch query failure {e.response.reason} ({e.response.status_code})", ) except requests.exceptions.ConnectionError: 
self.logger.exception( "{}: connection refused during the Elasticsearch request", klasname ) - abort( - HTTPStatus.BAD_GATEWAY, - message="Network problem, could not reach Elasticsearch", + raise APIAbort( + HTTPStatus.BAD_GATEWAY, "Network problem, could not reach Elasticsearch" ) except requests.exceptions.Timeout: self.logger.exception( "{}: connection timed out during the Elasticsearch request", klasname ) - abort( + raise APIAbort( HTTPStatus.GATEWAY_TIMEOUT, - message="Connection timed out, could reach Elasticsearch", + "Connection timed out, could not reach Elasticsearch", ) except requests.exceptions.InvalidURL: self.logger.exception( "{}: invalid url {} during the Elasticsearch request", klasname, url ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) except Exception as e: self.logger.exception( "{}: exception {} occurred during the Elasticsearch request", klasname, type(e).__name__, ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) try: # postprocess Elasticsearch response @@ -467,12 +462,11 @@ def _call(self, method: Callable, json_data: JSON): except PostprocessError as e: msg = f"{klasname}: the query postprocessor was unable to complete: {e}" self.logger.warning("{}", msg) - abort(e.status, message=msg, data=e.data) + raise APIAbort(e.status, str(e.message)) except KeyError as e: self.logger.error("{}: missing Elasticsearch key {}", klasname, e) - abort( - HTTPStatus.INTERNAL_SERVER_ERROR, - message=f"Missing Elasticsearch key {e}", + raise APIAbort( + HTTPStatus.INTERNAL_SERVER_ERROR, f"Missing Elasticsearch key {e}" ) except Exception as e: self.logger.exception( @@ -481,7 +475,7 @@ def _call(self, method: Callable, json_data: JSON): json_response, e, ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) def _post(self, json_data: JSON, _) -> 
Response: """ @@ -627,14 +621,14 @@ def _post(self, json_data: JSON, _) -> Response: try: dataset = Dataset.query(name=json_data["name"]) except DatasetNotFound as e: - abort(HTTPStatus.NOT_FOUND, message=str(e)) + raise APIAbort(HTTPStatus.NOT_FOUND, str(e)) owner = User.query(id=dataset.owner_id) if not owner: self.logger.error( "Dataset owner ID {} cannot be found in Users", dataset.owner_id ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="Dataset owner not found") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) # For bulk Elasticsearch operations, we check authorization against the # ownership of a designated dataset rather than having an explicit @@ -646,7 +640,7 @@ def _post(self, json_data: JSON, _) -> Response: try: self._check_authorization(owner.username, dataset.access) except UnauthorizedAccess as e: - abort(e.http_status, message=str(e)) + raise APIAbort(e.http_status, str(e)) # Build an Elasticsearch instance to manage the bulk update elastic = Elasticsearch(self.elastic_uri) @@ -734,7 +728,7 @@ def _post(self, json_data: JSON, _) -> Response: type(e).__name__, report, ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) summary = {"ok": count - error_count, "failure": error_count} @@ -747,7 +741,7 @@ def _post(self, json_data: JSON, _) -> Response: klasname, type(e).__name__, ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) # Return the summary document as the success response, or abort with an # internal error if we weren't 100% successful. 
Some elasticsearch @@ -766,12 +760,7 @@ def _post(self, json_data: JSON, _) -> Response: error_count, json.dumps(report), ) - abort( - HTTPStatus.INTERNAL_SERVER_ERROR, - message=f"{error_count:d} of {count:d} Elasticsearch document actions failed", - data=summary, - ) - + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR, summary) self.logger.info( "{}:dataset {}: {} successful document actions", klasname, dataset, count ) diff --git a/lib/pbench/server/api/resources/query_apis/datasets/__init__.py b/lib/pbench/server/api/resources/query_apis/datasets/__init__.py index 112f3fe8a9..b042803391 100644 --- a/lib/pbench/server/api/resources/query_apis/datasets/__init__.py +++ b/lib/pbench/server/api/resources/query_apis/datasets/__init__.py @@ -2,10 +2,9 @@ from logging import Logger from typing import AnyStr, Union, List -from flask_restful import abort - from pbench.server import PbenchServerConfig, JSON -from pbench.server.api.resources import Schema, SchemaError +from pbench.server.api.resources import APIAbort, Schema, SchemaError + from pbench.server.api.resources.query_apis import CONTEXT, ElasticBase from pbench.server.database.models.datasets import ( Dataset, @@ -92,6 +91,9 @@ def preprocess(self, client_json: JSON) -> CONTEXT: object and run_id as JSON CONTEXT so that the postprocess operations can use it to identify the index to be searched from document index metadata. + + Raises: + APIAbort: input can't be validated or normalized """ run_id = client_json["run_id"] @@ -99,16 +101,15 @@ def preprocess(self, client_json: JSON) -> CONTEXT: try: dataset = Dataset.query(md5=run_id) except DatasetNotFound: - self.logger.debug(f"Dataset with Run ID {run_id!r} not found") - abort(HTTPStatus.NOT_FOUND, message="Dataset not found") - + raise APIAbort( + HTTPStatus.NOT_FOUND, f"No datasets with Run ID '{run_id!r}' found." 
+ ) owner = User.query(id=dataset.owner_id) if not owner: self.logger.error( f"Dataset owner ID { dataset.owner_id!r} cannot be found in Users" ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="Dataset owner not found") - + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) # We check authorization against the ownership of the dataset that # was selected rather than having an explicit "user" # JSON parameter. This will raise UnauthorizedAccess on failure. @@ -125,15 +126,13 @@ def get_index(self, dataset: Dataset, root_index_name: AnyStr) -> AnyStr: """ try: index_map = Metadata.getvalue(dataset=dataset, key=Metadata.INDEX_MAP) - except MetadataError as e: - self.logger.error(f"Indices from metadata table not found {e!r}") - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") + except MetadataError as exc: + self.logger.error("{}", str(exc)) + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) if index_map is None: - self.logger.error( - f"server.index-map not found in Metadata for a dataset {dataset!r}" - ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") + self.logger.error("Index map metadata has no value") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) index_keys = [key for key in index_map if root_index_name in key] indices = ",".join(index_keys) diff --git a/lib/pbench/server/api/resources/query_apis/datasets/datasets_contents.py b/lib/pbench/server/api/resources/query_apis/datasets/datasets_contents.py index b721702313..90e3e0b118 100644 --- a/lib/pbench/server/api/resources/query_apis/datasets/datasets_contents.py +++ b/lib/pbench/server/api/resources/query_apis/datasets/datasets_contents.py @@ -1,8 +1,6 @@ from http import HTTPStatus from logging import Logger -from flask_restful import abort - from pbench.server import PbenchServerConfig from pbench.server.api.resources import ( JSON, @@ -65,14 +63,7 @@ def assemble(self, json_data: JSON, context: CONTEXT) -> JSON: # Retrieve the ES indices that belong to this run_id 
from the metadata # table - indices = self.get_index(dataset, "run-toc") - if not indices: - self.logger.debug( - f"Found no indices matching the prefix run-toc" - f" for a dataset {dataset!r}" - ) - abort(HTTPStatus.NOT_FOUND, message="Found no matching indices") return { "path": f"/{indices}/_search", diff --git a/lib/pbench/server/api/resources/query_apis/datasets/datasets_mappings.py b/lib/pbench/server/api/resources/query_apis/datasets/datasets_mappings.py index 6e78b74b7a..c15efd436f 100644 --- a/lib/pbench/server/api/resources/query_apis/datasets/datasets_mappings.py +++ b/lib/pbench/server/api/resources/query_apis/datasets/datasets_mappings.py @@ -3,14 +3,14 @@ from flask import jsonify from flask.wrappers import Request, Response -from flask_restful import abort from pbench.server import PbenchServerConfig, JSON from pbench.server.api.resources import ( + APIAbort, ApiBase, - Schema, - Parameter, ParamType, + Parameter, + Schema, SchemaError, ) @@ -99,7 +99,7 @@ def _get(self, json_data: JSON, request: Request) -> Response: try: self.schema.validate(json_data) except SchemaError as e: - abort(HTTPStatus.BAD_REQUEST, message=str(e)) + raise APIAbort(HTTPStatus.BAD_REQUEST, str(e)) index = RunIdBase.ES_INTERNAL_INDEX_NAMES[json_data["dataset_view"]] try: @@ -120,4 +120,4 @@ def _get(self, json_data: JSON, request: Request) -> Response: self.logger.exception( "Document template {} not found in the database.", index_name ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) diff --git a/lib/pbench/server/api/resources/query_apis/datasets/namespace_and_rows.py b/lib/pbench/server/api/resources/query_apis/datasets/namespace_and_rows.py index 7bdd277a22..3e59278a5b 100644 --- a/lib/pbench/server/api/resources/query_apis/datasets/namespace_and_rows.py +++ b/lib/pbench/server/api/resources/query_apis/datasets/namespace_and_rows.py @@ -2,10 +2,9 @@ from logging import Logger from flask import 
Response, jsonify -from flask_restful import abort from pbench.server import PbenchServerConfig, JSON -from pbench.server.api.resources import Parameter, ParamType, Schema +from pbench.server.api.resources import APIAbort, ParamType, Parameter, Schema from pbench.server.api.resources.query_apis import CONTEXT, PostprocessError from pbench.server.api.resources.query_apis.datasets import RunIdBase from pbench.server.database.models.template import TemplateNotFound @@ -68,12 +67,6 @@ def assemble(self, json_data: JSON, context: CONTEXT) -> JSON: # Retrieve the ES indices that belong to this run_id from the metadata # table indices = self.get_index(dataset, document_index) - if not indices: - self.logger.debug( - f"Found no indices matching the prefix {document_index!r}" - f"for a dataset {dataset!r}" - ) - abort(HTTPStatus.NOT_FOUND, message="Found no matching indices") try: mappings = self.get_mappings(document) @@ -81,7 +74,7 @@ def assemble(self, json_data: JSON, context: CONTEXT) -> JSON: self.logger.exception( f"Document template {document_index!r} not found in the database." ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="Mapping not found") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) result = self.get_aggregatable_fields(mappings) @@ -270,12 +263,6 @@ def assemble(self, json_data: JSON, context: CONTEXT) -> JSON: # Retrieve the ES indices that belong to this run_id indices = self.get_index(dataset, document_index) - if not indices: - self.logger.debug( - f"Found no indices matching the prefix {document_index!r}" - f"for a dataset {dataset!r}" - ) - abort(HTTPStatus.NOT_FOUND, message="Found no matching indices") try: mappings = self.get_mappings(document) @@ -283,7 +270,7 @@ def assemble(self, json_data: JSON, context: CONTEXT) -> JSON: self.logger.exception( f"Document template {document_index!r} not found in the database." 
) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="Mapping not found") + raise APIAbort(HTTPStatus.INTERNAL_SERVER_ERROR) # Prepare list of filters to apply for ES query es_filter = [{"match": {"run.id": run_id}}] diff --git a/lib/pbench/server/api/resources/query_apis/datasets_detail.py b/lib/pbench/server/api/resources/query_apis/datasets_detail.py index 32d3e5710e..c923a2e709 100644 --- a/lib/pbench/server/api/resources/query_apis/datasets_detail.py +++ b/lib/pbench/server/api/resources/query_apis/datasets_detail.py @@ -1,10 +1,9 @@ from http import HTTPStatus from flask import jsonify -from flask_restful import abort from logging import Logger from pbench.server import PbenchServerConfig, JSON -from pbench.server.api.resources import Schema, Parameter, ParamType +from pbench.server.api.resources import APIAbort, ParamType, Parameter, Schema from pbench.server.api.resources.query_apis import ( CONTEXT, ElasticBase, @@ -158,12 +157,11 @@ def postprocess(self, es_json: JSON, context: CONTEXT) -> JSON: try: m = self._get_metadata(src["run"]["name"], context["metadata"]) except DatasetNotFound: - abort( - HTTPStatus.BAD_REQUEST, - message=f"Dataset {src['run']['name']} not found", + raise APIAbort( + HTTPStatus.BAD_REQUEST, f"Dataset {src['run']['name']} not found" ) except MetadataError as e: - abort(HTTPStatus.BAD_REQUEST, message=str(e)) + raise APIAbort(HTTPStatus.BAD_REQUEST, str(e)) if m: result["serverMetadata"] = m diff --git a/lib/pbench/test/unit/server/query_apis/test_datasets_contents.py b/lib/pbench/test/unit/server/query_apis/test_datasets_contents.py index 909df79de6..ff9f51418f 100644 --- a/lib/pbench/test/unit/server/query_apis/test_datasets_contents.py +++ b/lib/pbench/test/unit/server/query_apis/test_datasets_contents.py @@ -472,3 +472,18 @@ def test_get_index(self, attach_dataset, provide_metadata): drb = Dataset.query(name="drb") indices = self.cls_obj.get_index(drb, self.index_from_metadata) assert indices == "unit-test.v6.run-toc.2020-05" + 
+ @pytest.mark.parametrize("run_id", ("wrong", "", None)) + def test_missing_run_id(self, client, server_config, pbench_token, run_id): + if run_id is None: + del self.payload["run_id"] + expected_status = HTTPStatus.BAD_REQUEST + else: + self.payload["run_id"] = run_id + expected_status = HTTPStatus.NOT_FOUND + response = client.post( + f"{server_config.rest_uri}{self.pbench_endpoint}", + headers={"Authorization": "Bearer " + pbench_token}, + json=self.payload, + ) + assert response.status_code == expected_status diff --git a/lib/pbench/test/unit/server/query_apis/test_datasets_delete.py b/lib/pbench/test/unit/server/query_apis/test_datasets_delete.py index e677afc4ea..b3371054f6 100644 --- a/lib/pbench/test/unit/server/query_apis/test_datasets_delete.py +++ b/lib/pbench/test/unit/server/query_apis/test_datasets_delete.py @@ -178,7 +178,7 @@ def test_partial( # Verify the report and status assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR - assert response.json["data"] == {"ok": 28, "failure": 3} + assert response.json["message"] == {"ok": 28, "failure": 3} assert ( "pbench.server.api", ERROR, @@ -235,4 +235,4 @@ def fake_bulk( # Verify the failure assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR - assert response.json["message"] == "INTERNAL ERROR" + assert response.json["message"] == HTTPStatus.INTERNAL_SERVER_ERROR.phrase diff --git a/lib/pbench/test/unit/server/query_apis/test_datasets_mappings.py b/lib/pbench/test/unit/server/query_apis/test_datasets_mappings.py index 19ef39bddb..2317eacfbb 100644 --- a/lib/pbench/test/unit/server/query_apis/test_datasets_mappings.py +++ b/lib/pbench/test/unit/server/query_apis/test_datasets_mappings.py @@ -127,4 +127,4 @@ def test_with_db_error(self, client, server_config, database_error): with client: response = client.get(f"{server_config.rest_uri}/datasets/mappings/summary") assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR - assert response.json["message"] == "Internal Server 
Error" + assert response.json["message"] == HTTPStatus.INTERNAL_SERVER_ERROR.phrase diff --git a/lib/pbench/test/unit/server/query_apis/test_datasets_publish.py b/lib/pbench/test/unit/server/query_apis/test_datasets_publish.py index 2fbdd2530c..83f369f204 100644 --- a/lib/pbench/test/unit/server/query_apis/test_datasets_publish.py +++ b/lib/pbench/test/unit/server/query_apis/test_datasets_publish.py @@ -159,7 +159,7 @@ def test_partial( # Verify the report and status assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR - assert response.json["data"] == {"ok": 28, "failure": 3} + assert response.json["message"] == {"ok": 28, "failure": 3} assert ( "pbench.server.api", ERROR, @@ -217,4 +217,4 @@ def fake_bulk( # Verify the failure assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR - assert response.json["message"] == "INTERNAL ERROR" + assert response.json["message"] == HTTPStatus.INTERNAL_SERVER_ERROR.phrase diff --git a/lib/pbench/test/unit/server/query_apis/test_namespace_and_rows.py b/lib/pbench/test/unit/server/query_apis/test_namespace_and_rows.py index 7fe6045437..a095a92f73 100644 --- a/lib/pbench/test/unit/server/query_apis/test_namespace_and_rows.py +++ b/lib/pbench/test/unit/server/query_apis/test_namespace_and_rows.py @@ -1,7 +1,7 @@ from http import HTTPStatus import pytest -from werkzeug.exceptions import InternalServerError +from pbench.server.api.resources import APIAbort from pbench.server.api.resources.query_apis.datasets.namespace_and_rows import ( SampleNamespace, @@ -764,9 +764,9 @@ def test_exceptions_on_get_index(self, attach_dataset): test = Dataset.query(name="test") # When server index_map is None we expect 500 - with pytest.raises(InternalServerError) as exc: + with pytest.raises(APIAbort) as exc: self.cls_obj.get_index(test, self.index_from_metadata) - assert exc.value.code == HTTPStatus.INTERNAL_SERVER_ERROR + assert exc.value.http_status == HTTPStatus.INTERNAL_SERVER_ERROR Metadata.setvalue( dataset=test,