Skip to content

Commit

Permalink
check values in set instead of list
Browse files Browse the repository at this point in the history
membership checks against a set should be faster than against a list
  • Loading branch information
blankdots committed Feb 15, 2022
1 parent 04d338d commit d57eac1
Show file tree
Hide file tree
Showing 8 changed files with 34 additions and 30 deletions.
10 changes: 5 additions & 5 deletions metadata_backend/api/handlers/folder.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,11 +41,11 @@ def _check_patch_folder(self, patch_ops: Any) -> None:
for op in patch_ops:
if _tags.match(op["path"]):
LOG.info(f"{op['op']} on tags in folder")
if "submissionType" in op["value"].keys() and op["value"]["submissionType"] not in [
if "submissionType" in op["value"].keys() and op["value"]["submissionType"] not in {
"XML",
"CSV",
"Form",
]:
}:
reason = "submissionType is restricted to either 'CSV', 'XML' or 'Form' values."
LOG.error(reason)
raise web.HTTPBadRequest(reason=reason)
Expand All @@ -55,7 +55,7 @@ def _check_patch_folder(self, patch_ops: Any) -> None:
reason = f"Request contains '{op['path']}' key that cannot be updated to folders."
LOG.error(reason)
raise web.HTTPBadRequest(reason=reason)
if op["op"] in ["remove", "copy", "test", "move"]:
if op["op"] in {"remove", "copy", "test", "move"}:
reason = f"{op['op']} on {op['path']} is not allowed."
LOG.error(reason)
raise web.HTTPUnauthorized(reason=reason)
Expand All @@ -73,7 +73,7 @@ def _check_patch_folder(self, patch_ops: Any) -> None:
if (
"tags" in item
and "submissionType" in item["tags"]
and item["tags"]["submissionType"] not in ["XML", "Form"]
and item["tags"]["submissionType"] not in {"XML", "CSV", "Form"}
):
reason = "submissionType is restricted to either 'XML' or 'Form' values."
LOG.error(reason)
Expand All @@ -98,7 +98,7 @@ async def get_folders(self, req: Request) -> Response:
# Check if only published or draft folders are requested
if "published" in req.query:
pub_param = req.query.get("published", "").title()
if pub_param in ["True", "False"]:
if pub_param in {"True", "False"}:
folder_query["published"] = {"$eq": bool(strtobool(pub_param))}
else:
reason = "'published' parameter must be either 'true' or 'false'"
Expand Down
12 changes: 8 additions & 4 deletions metadata_backend/api/handlers/user.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,11 @@ def _check_patch_user(self, patch_ops: Any) -> None:
for op in patch_ops:
if _tags.match(op["path"]):
LOG.info(f"{op['op']} on tags in folder")
if "submissionType" in op["value"].keys() and op["value"]["submissionType"] not in ["XML", "Form"]:
if "submissionType" in op["value"].keys() and op["value"]["submissionType"] not in {
"XML",
"CSV",
"Form",
}:
reason = "submissionType is restricted to either 'XML' or 'Form' values."
LOG.error(reason)
raise web.HTTPBadRequest(reason=reason)
Expand All @@ -46,7 +50,7 @@ def _check_patch_user(self, patch_ops: Any) -> None:
reason = f"Request contains '{op['path']}' key that cannot be updated to user object"
LOG.error(reason)
raise web.HTTPBadRequest(reason=reason)
if op["op"] in ["remove", "copy", "test", "move", "replace"]:
if op["op"] in {"remove", "copy", "test", "move", "replace"}:
reason = f"{op['op']} on {op['path']} is not allowed."
LOG.error(reason)
raise web.HTTPUnauthorized(reason=reason)
Expand All @@ -65,7 +69,7 @@ def _check_patch_user(self, patch_ops: Any) -> None:
if (
"tags" in item
and "submissionType" in item["tags"]
and item["tags"]["submissionType"] not in ["XML", "Form"]
and item["tags"]["submissionType"] not in {"XML", "CSV", "Form"}
):
reason = "submissionType is restricted to either 'XML' or 'Form' values."
LOG.error(reason)
Expand Down Expand Up @@ -187,7 +191,7 @@ async def _get_user_items(self, req: Request, user: Dict, item_type: str) -> Tup
:returns: Paginated list of user draft templates and link header
"""
# Check item_type parameter is not faulty
if item_type not in ["templates", "folders"]:
if item_type not in {"templates", "folders"}:
reason = f"{item_type} is a faulty item parameter. Should be either folders or templates"
LOG.error(reason)
raise web.HTTPBadRequest(reason=reason)
Expand Down
4 changes: 2 additions & 2 deletions metadata_backend/api/middlewares.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
def _check_error_page_requested(req: Request, error_code: int) -> web.Response: # type:ignore
"""Return the correct error page with correct status code."""
if "Accept" in req.headers and req.headers["Accept"]:
if req.headers["Accept"].split(",")[0] in ["text/html", "application/xhtml+xml"]:
if req.headers["Accept"].split(",")[0] in {"text/html", "application/xhtml+xml"}:
raise web.HTTPSeeOther(
f"/error{str(error_code)}",
headers={
Expand Down Expand Up @@ -115,7 +115,7 @@ async def check_login(request: Request, handler: Callable) -> StreamResponse:
if request.path.startswith(tuple(controlled_paths)) and "OIDC_URL" in os.environ and bool(os.getenv("OIDC_URL")):
cookie = decrypt_cookie(request)
session = request.app["Session"].setdefault(cookie["id"], {})
if not all(x in ["access_token", "user_info", "oidc_state"] for x in session):
if not all(x in {"access_token", "user_info", "oidc_state"} for x in session):
LOG.debug("checked session parameter")
response = web.HTTPSeeOther(f"{aai_config['domain']}/aai")
response.headers["Location"] = "/aai"
Expand Down
4 changes: 2 additions & 2 deletions metadata_backend/api/operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ async def _insert_formatted_object_to_db(self, schema_type: str, data: Dict) ->
raise web.HTTPBadRequest(reason=reason)
if insert_success:
try:
title = data["descriptor"]["studyTitle"] if schema_type in ["study", "draft-study"] else data["title"]
title = data["descriptor"]["studyTitle"] if schema_type in {"study", "draft-study"} else data["title"]
except (TypeError, KeyError):
title = ""
return data["accessionId"], title
Expand Down Expand Up @@ -176,7 +176,7 @@ async def _replace_object_from_db(self, schema_type: str, accession_id: str, dat
raise web.HTTPBadRequest(reason=reason)
if replace_success:
try:
title = data["descriptor"]["studyTitle"] if schema_type in ["study", "draft-study"] else data["title"]
title = data["descriptor"]["studyTitle"] if schema_type in {"study", "draft-study"} else data["title"]
except (TypeError, KeyError):
title = ""
return accession_id, title
Expand Down
16 changes: 8 additions & 8 deletions metadata_backend/database/db_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,8 +35,8 @@ async def retry(*args: Any, **kwargs: Any) -> Any:
message = f"Connection to database failed after {attempt} tries"
raise ConnectionFailure(message=message)
LOG.error(
"Connection not successful, trying to reconnect."
f"Reconnection attempt number {attempt}, waiting for {default_timeout} seconds."
"Connection not successful, trying to reconnect. "
+ f"Reconnection attempt number {attempt}, waiting for {default_timeout} seconds."
)
continue

Expand Down Expand Up @@ -84,7 +84,7 @@ async def exists(self, collection: str, accession_id: str) -> bool:
:param accession_id: ID of the object/folder/user to be searched
:returns: True if exists and False if it does not
"""
id_key = f"{collection}Id" if (collection in ["folder", "user"]) else "accessionId"
id_key = f"{collection}Id" if (collection in {"folder", "user"}) else "accessionId"
projection = {"_id": False, "externalId": False} if collection == "user" else {"_id": False}
find_by_id = {id_key: accession_id}
exists = await self.database[collection].find_one(find_by_id, projection)
Expand Down Expand Up @@ -124,7 +124,7 @@ async def read(self, collection: str, accession_id: str) -> Dict:
:param accession_id: ID of the object/folder/user to be searched
:returns: First document matching the accession_id
"""
id_key = f"{collection}Id" if (collection in ["folder", "user"]) else "accessionId"
id_key = f"{collection}Id" if (collection in {"folder", "user"}) else "accessionId"
projection = {"_id": False, "eppn": False} if collection == "user" else {"_id": False}
find_by_id = {id_key: accession_id}
LOG.debug(f"DB doc in {collection} read for {accession_id}.")
Expand Down Expand Up @@ -160,7 +160,7 @@ async def update(self, collection: str, accession_id: str, data_to_be_updated: D
updated to object, can replace previous fields and add new ones.
:returns: True if operation was successful
"""
id_key = f"{collection}Id" if (collection in ["folder", "user"]) else "accessionId"
id_key = f"{collection}Id" if (collection in {"folder", "user"}) else "accessionId"
find_by_id = {id_key: accession_id}
update_op = {"$set": data_to_be_updated}
result = await self.database[collection].update_one(find_by_id, update_op)
Expand All @@ -177,7 +177,7 @@ async def remove(self, collection: str, accession_id: str, data_to_be_removed: A
updated to removed.
:returns: True if operation was successful
"""
id_key = f"{collection}Id" if (collection in ["folder", "user"]) else "accessionId"
id_key = f"{collection}Id" if (collection in {"folder", "user"}) else "accessionId"
find_by_id = {id_key: accession_id}
remove_op = {"$pull": data_to_be_removed}
result = await self.database[collection].find_one_and_update(
Expand All @@ -196,7 +196,7 @@ async def append(self, collection: str, accession_id: str, data_to_be_addded: An
updated to removed.
:returns: True if operation was successful
"""
id_key = f"{collection}Id" if (collection in ["folder", "user"]) else "accessionId"
id_key = f"{collection}Id" if (collection in {"folder", "user"}) else "accessionId"
find_by_id = {id_key: accession_id}
# $push vs $addToSet
# $push allows us to specify the position, but it does not check that items are unique
Expand Down Expand Up @@ -241,7 +241,7 @@ async def delete(self, collection: str, accession_id: str) -> bool:
:param accession_id: ID for object/folder/user to be deleted
:returns: True if operation was successful
"""
id_key = f"{collection}Id" if (collection in ["folder", "user"]) else "accessionId"
id_key = f"{collection}Id" if (collection in {"folder", "user"}) else "accessionId"
find_by_id = {id_key: accession_id}
result = await self.database[collection].delete_one(find_by_id)
LOG.debug(f"DB doc in {collection} deleted for {accession_id}.")
Expand Down
14 changes: 7 additions & 7 deletions metadata_backend/helpers/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def _flatten(self, data: Any) -> Union[Dict, List, str, None]:
:param schema_type: XML data
:returns: XML element flattened.
"""
links = [
links = {
"studyLinks",
"sampleLinks",
"runLinks",
Expand All @@ -64,7 +64,7 @@ def _flatten(self, data: Any) -> Union[Dict, List, str, None]:
"datasetLinks",
"assemblyLinks",
"submissionLinks",
]
}

attrs = [
"studyAttributes",
Expand All @@ -82,14 +82,14 @@ def _flatten(self, data: Any) -> Union[Dict, List, str, None]:
"dataUses",
]

refs = ["analysisRef", "sampleRef", "runRef", "experimentRef"]
refs = {"analysisRef", "sampleRef", "runRef", "experimentRef"}

children: Any = self.dict()

for key, value, _ in self.map_content(data.content):
key = self._to_camel(key.lower())

if key in attrs and len(value) == 1:
if key in set(attrs) and len(value) == 1:
attrs = list(value.values())
children[key] = attrs[0] if isinstance(attrs[0], list) else attrs
continue
Expand All @@ -106,7 +106,7 @@ def _flatten(self, data: Any) -> Union[Dict, List, str, None]:
continue

if "assembly" in key:
if next(iter(value)) in ["standard", "custom"]:
if next(iter(value)) in {"standard", "custom"}:
children[key] = next(iter(value.values()))
if "accessionId" in children[key]:
children[key]["accession"] = children[key].pop("accessionId")
Expand Down Expand Up @@ -377,7 +377,7 @@ def parse(self, schema_type: str, content: str) -> List:
"""
csv_reader = csv.DictReader(StringIO(content), delimiter=",", quoting=csv.QUOTE_NONE)

_sample_list = [
_sample_list = {
"title",
"alias",
"description",
Expand All @@ -389,7 +389,7 @@ def parse(self, schema_type: str, content: str) -> List:
"cellLine",
"region",
"phenotype",
]
}

if (
csv_reader.fieldnames
Expand Down
2 changes: 1 addition & 1 deletion metadata_backend/helpers/schema_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ def _identify_file(self, schema_type: str) -> Path:
"""
schema_type = schema_type.lower()
schema_file = None
for file in [x for x in self.path.iterdir()]:
for file in set([x for x in self.path.iterdir()]):
if schema_type in file.name and file.name.endswith(self.loader_type):
schema_file = file
break
Expand Down
2 changes: 1 addition & 1 deletion tests/integration/mongo_indexes.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ async def create_indexes(url: str) -> None:
db = client[DATABASE]
LOG.debug(f"Current database: {db}")
LOG.debug("=== Create collections ===")
for col in ["folder", "user"]:
for col in {"folder", "user"}:
try:
await db.create_collection(col)
except pymongo.errors.CollectionInvalid as e:
Expand Down

0 comments on commit d57eac1

Please sign in to comment.