From f653193443dbd66f692f516b6a930904b563811a Mon Sep 17 00:00:00 2001
From: remoteeng00
Date: Tue, 21 Feb 2023 16:21:55 +0700
Subject: [PATCH] Allow location_id = None for PrevalenceAllLineagesByLocationHandler

---
 .../v2/genomics/prevalence_all_lineages_by_location.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/web/handlers/v2/genomics/prevalence_all_lineages_by_location.py b/web/handlers/v2/genomics/prevalence_all_lineages_by_location.py
index 2358834e..3c4318fe 100644
--- a/web/handlers/v2/genomics/prevalence_all_lineages_by_location.py
+++ b/web/handlers/v2/genomics/prevalence_all_lineages_by_location.py
@@ -15,10 +15,11 @@ class PrevalenceAllLineagesByLocationHandler(BaseHandler):
+    # size = 100  # If size=1000, a too_many_buckets_exception is raised when location_id is missing from the query.
     name = "prevalence-by-location-all-lineages"
     kwargs = dict(BaseHandler.kwargs)
     kwargs["GET"] = {
-        "location_id": {"type": str, "required": True},
+        "location_id": {"type": str, "default": None},
         "window": {"type": int, "default": None, "min": 1},
         "other_threshold": {"type": float, "default": 0.05, "min": 0, "max": 1},
         "nday_threshold": {"type": int, "default": 10, "min": 1},
@@ -42,7 +43,6 @@ async def _get(self):
         query_cumulative = self.args.cumulative
         query = {
             "size": 0,
-            "query": {},
             "aggs": {
                 "count": {
                     "terms": {"field": "date_collected", "size": self.size},
@@ -56,7 +56,9 @@ async def _get(self):
         date_range_filter = create_date_range_filter(
             "date_collected", self.args.min_date, self.args.max_date
         )
-        query["query"] = parse_time_window_to_query(date_range_filter, query_obj=query_obj)
+        query_obj = parse_time_window_to_query(date_range_filter, query_obj=query_obj)
+        if query_obj:
+            query["query"] = query_obj
         # import json
         # print(json.dumps(query))
         resp = await self.asynchronous_fetch(query)
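
Reviewer note (not part of the patch): a minimal, self-contained sketch of the behavior this change aims for. The helper name build_prevalence_query and the bool/filter shape below are assumptions for illustration; the real handler delegates to create_date_range_filter / parse_time_window_to_query, whose output shape is not visible in this diff. The point illustrated is that the request body now carries a "query" clause only when at least one filter applies, instead of always sending an empty "query": {}.

# Sketch only -- illustrates the conditional "query" assignment introduced above.
# build_prevalence_query, the bool/filter shape, and the trimmed "aggs" block are
# illustrative assumptions, not the handler's actual helpers.

def build_prevalence_query(size, date_range_filter=None, location_filter=None):
    """Return an ES-style body with "query" attached only when a filter exists."""
    query = {
        "size": 0,
        "aggs": {
            "count": {
                "terms": {"field": "date_collected", "size": size},
                # ...nested lineage aggregations omitted in this sketch...
            }
        },
    }
    filters = [f for f in (date_range_filter, location_filter) if f]
    if filters:
        # Mirrors the patch: only set "query" when there is something to filter on,
        # so a request without location_id no longer sends an empty "query": {}.
        query["query"] = {"bool": {"filter": filters}}
    return query


if __name__ == "__main__":
    # No location_id and no date range: aggregation-only body, no "query" key at all.
    print(build_prevalence_query(size=100))
    # Date range only: "query" is present and scoped to date_collected.
    print(build_prevalence_query(
        size=100,
        date_range_filter={"range": {"date_collected": {"gte": "2022-01-01", "lte": "2022-12-31"}}},
    ))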